litellm-proxy-extras 0.1.8__py3-none-any.whl → 0.1.14__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

@@ -0,0 +1,4 @@
+ -- AlterTable
+ ALTER TABLE "LiteLLM_DailyUserSpend" ADD COLUMN "cache_creation_input_tokens" INTEGER NOT NULL DEFAULT 0,
+ ADD COLUMN "cache_read_input_tokens" INTEGER NOT NULL DEFAULT 0;
+
@@ -0,0 +1,45 @@
+ -- AlterTable
+ ALTER TABLE "LiteLLM_DailyTeamSpend" ADD COLUMN "cache_creation_input_tokens" INTEGER NOT NULL DEFAULT 0,
+ ADD COLUMN "cache_read_input_tokens" INTEGER NOT NULL DEFAULT 0;
+
+ -- CreateTable
+ CREATE TABLE "LiteLLM_DailyTagSpend" (
+ "id" TEXT NOT NULL,
+ "tag" TEXT NOT NULL,
+ "date" TEXT NOT NULL,
+ "api_key" TEXT NOT NULL,
+ "model" TEXT NOT NULL,
+ "model_group" TEXT,
+ "custom_llm_provider" TEXT,
+ "prompt_tokens" INTEGER NOT NULL DEFAULT 0,
+ "completion_tokens" INTEGER NOT NULL DEFAULT 0,
+ "cache_read_input_tokens" INTEGER NOT NULL DEFAULT 0,
+ "cache_creation_input_tokens" INTEGER NOT NULL DEFAULT 0,
+ "spend" DOUBLE PRECISION NOT NULL DEFAULT 0.0,
+ "api_requests" INTEGER NOT NULL DEFAULT 0,
+ "successful_requests" INTEGER NOT NULL DEFAULT 0,
+ "failed_requests" INTEGER NOT NULL DEFAULT 0,
+ "created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ "updated_at" TIMESTAMP(3) NOT NULL,
+
+ CONSTRAINT "LiteLLM_DailyTagSpend_pkey" PRIMARY KEY ("id")
+ );
+
+ -- CreateIndex
+ CREATE UNIQUE INDEX "LiteLLM_DailyTagSpend_tag_key" ON "LiteLLM_DailyTagSpend"("tag");
+
+ -- CreateIndex
+ CREATE INDEX "LiteLLM_DailyTagSpend_date_idx" ON "LiteLLM_DailyTagSpend"("date");
+
+ -- CreateIndex
+ CREATE INDEX "LiteLLM_DailyTagSpend_tag_idx" ON "LiteLLM_DailyTagSpend"("tag");
+
+ -- CreateIndex
+ CREATE INDEX "LiteLLM_DailyTagSpend_api_key_idx" ON "LiteLLM_DailyTagSpend"("api_key");
+
+ -- CreateIndex
+ CREATE INDEX "LiteLLM_DailyTagSpend_model_idx" ON "LiteLLM_DailyTagSpend"("model");
+
+ -- CreateIndex
+ CREATE UNIQUE INDEX "LiteLLM_DailyTagSpend_tag_date_api_key_model_custom_llm_pro_key" ON "LiteLLM_DailyTagSpend"("tag", "date", "api_key", "model", "custom_llm_provider");
+
@@ -0,0 +1,3 @@
+ -- DropIndex
+ DROP INDEX "LiteLLM_DailyTagSpend_tag_key";
+
@@ -0,0 +1,3 @@
+ -- AlterTable
+ ALTER TABLE "LiteLLM_VerificationToken" ADD COLUMN "allowed_routes" TEXT[] DEFAULT ARRAY[]::TEXT[];
+
@@ -0,0 +1,4 @@
+ -- AlterTable
+ ALTER TABLE "LiteLLM_SpendLogs" ADD COLUMN "proxy_server_request" JSONB DEFAULT '{}',
+ ADD COLUMN "session_id" TEXT;
+
@@ -0,0 +1,14 @@
+ -- CreateTable
+ CREATE TABLE "LiteLLM_ManagedVectorStoresTable" (
+ "vector_store_id" TEXT NOT NULL,
+ "custom_llm_provider" TEXT NOT NULL,
+ "vector_store_name" TEXT,
+ "vector_store_description" TEXT,
+ "vector_store_metadata" JSONB,
+ "created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ "updated_at" TIMESTAMP(3) NOT NULL,
+ "litellm_credential_name" TEXT,
+
+ CONSTRAINT "LiteLLM_ManagedVectorStoresTable_pkey" PRIMARY KEY ("vector_store_id")
+ );
+
@@ -169,6 +169,7 @@ model LiteLLM_VerificationToken {
  budget_duration String?
  budget_reset_at DateTime?
  allowed_cache_controls String[] @default([])
+ allowed_routes String[] @default([])
  model_spend Json @default("{}")
  model_max_budget Json @default("{}")
  budget_id String?
@@ -225,6 +226,8 @@ model LiteLLM_SpendLogs {
  requester_ip_address String?
  messages Json? @default("{}")
  response Json? @default("{}")
+ session_id String?
+ proxy_server_request Json? @default("{}")
  @@index([startTime])
  @@index([end_user])
  }
@@ -326,6 +329,8 @@ model LiteLLM_DailyUserSpend {
  custom_llm_provider String?
  prompt_tokens Int @default(0)
  completion_tokens Int @default(0)
+ cache_read_input_tokens Int @default(0)
+ cache_creation_input_tokens Int @default(0)
  spend Float @default(0.0)
  api_requests Int @default(0)
  successful_requests Int @default(0)
@@ -340,6 +345,60 @@ model LiteLLM_DailyUserSpend {
  @@index([model])
  }

+ // Track daily team spend metrics per model and key
+ model LiteLLM_DailyTeamSpend {
+ id String @id @default(uuid())
+ team_id String
+ date String
+ api_key String
+ model String
+ model_group String?
+ custom_llm_provider String?
+ prompt_tokens Int @default(0)
+ completion_tokens Int @default(0)
+ cache_read_input_tokens Int @default(0)
+ cache_creation_input_tokens Int @default(0)
+ spend Float @default(0.0)
+ api_requests Int @default(0)
+ successful_requests Int @default(0)
+ failed_requests Int @default(0)
+ created_at DateTime @default(now())
+ updated_at DateTime @updatedAt
+
+ @@unique([team_id, date, api_key, model, custom_llm_provider])
+ @@index([date])
+ @@index([team_id])
+ @@index([api_key])
+ @@index([model])
+ }
+
+ // Track daily team spend metrics per model and key
+ model LiteLLM_DailyTagSpend {
+ id String @id @default(uuid())
+ tag String
+ date String
+ api_key String
+ model String
+ model_group String?
+ custom_llm_provider String?
+ prompt_tokens Int @default(0)
+ completion_tokens Int @default(0)
+ cache_read_input_tokens Int @default(0)
+ cache_creation_input_tokens Int @default(0)
+ spend Float @default(0.0)
+ api_requests Int @default(0)
+ successful_requests Int @default(0)
+ failed_requests Int @default(0)
+ created_at DateTime @default(now())
+ updated_at DateTime @updatedAt
+
+ @@unique([tag, date, api_key, model, custom_llm_provider])
+ @@index([date])
+ @@index([tag])
+ @@index([api_key])
+ @@index([model])
+ }
+

  // Track the status of cron jobs running. Only allow one pod to run the job at a time
  model LiteLLM_CronJob {
@@ -366,3 +425,14 @@ model LiteLLM_ManagedFileTable {
  @@index([unified_file_id])
  }

+
+ model LiteLLM_ManagedVectorStoresTable {
+ vector_store_id String @id
+ custom_llm_provider String
+ vector_store_name String?
+ vector_store_description String?
+ vector_store_metadata Json?
+ created_at DateTime @default(now())
+ updated_at DateTime @updatedAt
+ litellm_credential_name String?
+ }
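
The new LiteLLM_DailyTagSpend model mirrors the per-user and per-team daily spend tables, including the cache_read_input_tokens / cache_creation_input_tokens columns added in this release. A hedged sketch of reading those aggregates straight from Postgres; psycopg2, a plain libpq-compatible DATABASE_URL, and the example date are assumptions, not something this package ships or requires:

# Sketch only: per-tag daily spend, including the new cache token columns.
# Assumes psycopg2 is installed and DATABASE_URL is a libpq-compatible URL.
import os
import psycopg2

conn = psycopg2.connect(os.environ["DATABASE_URL"])
with conn, conn.cursor() as cur:
    cur.execute(
        """
        SELECT "tag",
               SUM("spend")                       AS total_spend,
               SUM("cache_read_input_tokens")     AS cache_read,
               SUM("cache_creation_input_tokens") AS cache_creation
        FROM "LiteLLM_DailyTagSpend"
        WHERE "date" = %s
        GROUP BY "tag"
        ORDER BY total_spend DESC
        """,
        ("2025-04-30",),  # illustrative date value
    )
    for tag, total_spend, cache_read, cache_creation in cur.fetchall():
        print(tag, total_spend, cache_read, cache_creation)
conn.close()

Note that the table's effective key is the composite unique index on (tag, date, api_key, model, custom_llm_provider); the single-column unique index on tag is dropped again by the follow-up migration above, presumably because one tag accumulates many daily rows.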
@@ -1,8 +1,11 @@
  import glob
  import os
  import random
+ import re
+ import shutil
  import subprocess
  import time
+ from datetime import datetime
  from pathlib import Path
  from typing import Optional

@@ -18,9 +21,30 @@ def str_to_bool(value: Optional[str]) -> bool:
  class ProxyExtrasDBManager:
  @staticmethod
  def _get_prisma_dir() -> str:
- """Get the path to the migrations directory"""
- migrations_dir = os.path.dirname(__file__)
- return migrations_dir
+ """
+ Get the path to the migrations directory
+
+ Set os.environ["LITELLM_MIGRATION_DIR"] to a custom migrations directory, to support baselining db in read-only fs.
+ """
+ custom_migrations_dir = os.getenv("LITELLM_MIGRATION_DIR")
+ pkg_migrations_dir = os.path.dirname(__file__)
+ if custom_migrations_dir:
+ # If migrations_dir exists, copy contents
+ if os.path.exists(custom_migrations_dir):
+ # Copy contents instead of directory itself
+ for item in os.listdir(pkg_migrations_dir):
+ src_path = os.path.join(pkg_migrations_dir, item)
+ dst_path = os.path.join(custom_migrations_dir, item)
+ if os.path.isdir(src_path):
+ shutil.copytree(src_path, dst_path, dirs_exist_ok=True)
+ else:
+ shutil.copy2(src_path, dst_path)
+ else:
+ # If directory doesn't exist, create it and copy everything
+ shutil.copytree(pkg_migrations_dir, custom_migrations_dir)
+ return custom_migrations_dir
+
+ return pkg_migrations_dir

  @staticmethod
  def _create_baseline_migration(schema_path: str) -> bool:
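
The rewritten _get_prisma_dir() copies the packaged migrations into LITELLM_MIGRATION_DIR when that variable is set, so deployments with a read-only site-packages can still baseline the database. A minimal usage sketch, assuming the class is importable from litellm_proxy_extras.utils (the module listed in the RECORD below); the directory and connection string are placeholders:

# Sketch only: point litellm-proxy-extras migrations at a writable directory
# before setting up the database. Paths and DATABASE_URL are illustrative.
import os

from litellm_proxy_extras.utils import ProxyExtrasDBManager

os.environ["LITELLM_MIGRATION_DIR"] = "/tmp/litellm-migrations"  # any writable path
os.environ.setdefault("DATABASE_URL", "postgresql://user:pass@localhost:5432/litellm")

# setup_database() no longer takes schema_path (see the later hunk); it resolves
# schema.prisma from the migrations directory chosen above.
ok = ProxyExtrasDBManager.setup_database(use_migrate=True)
print("database ready:", ok)

The copy path relies on shutil.copytree(..., dirs_exist_ok=True), which requires Python 3.8+ and so matches the Requires-Python floor in the METADATA below.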
@@ -32,27 +56,29 @@ class ProxyExtrasDBManager:
  # Create migrations/0_init directory
  init_dir.mkdir(parents=True, exist_ok=True)

- # Generate migration SQL file
- migration_file = init_dir / "migration.sql"
+ database_url = os.getenv("DATABASE_URL")

  try:
- # Generate migration diff with increased timeout
+ # 1. Generate migration SQL file by comparing empty state to current db state
+ logger.info("Generating baseline migration...")
+ migration_file = init_dir / "migration.sql"
  subprocess.run(
  [
  "prisma",
  "migrate",
  "diff",
  "--from-empty",
- "--to-schema-datamodel",
- str(schema_path),
+ "--to-url",
+ database_url,
  "--script",
  ],
  stdout=open(migration_file, "w"),
  check=True,
  timeout=30,
- ) # 30 second timeout
+ )

- # Mark migration as applied with increased timeout
+ # 3. Mark the migration as applied since it represents current state
+ logger.info("Marking baseline migration as applied...")
  subprocess.run(
  [
  "prisma",
@@ -72,8 +98,10 @@ class ProxyExtrasDBManager:
  )
  return False
  except subprocess.CalledProcessError as e:
- logger.warning(f"Error creating baseline migration: {e}")
- return False
+ logger.warning(
+ f"Error creating baseline migration: {e}, {e.stderr}, {e.stdout}"
+ )
+ raise e

  @staticmethod
  def _get_migration_names(migrations_dir: str) -> list:
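
One consequence of switching the baseline diff from --to-schema-datamodel to --to-url: DATABASE_URL must be set and reachable, otherwise None is handed straight to the prisma CLI. A minimal defensive check a caller could run first; this is a sketch, not part of the package:

# Sketch only: fail fast when DATABASE_URL is missing, since the baseline
# path now runs `prisma migrate diff --from-empty --to-url <DATABASE_URL>`.
import os

if not os.getenv("DATABASE_URL"):
    raise RuntimeError(
        "DATABASE_URL must be set before baselining: litellm-proxy-extras "
        "0.1.14 diffs against the live database instead of schema.prisma"
    )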
@@ -83,8 +111,105 @@ class ProxyExtrasDBManager:
  return [Path(p).parent.name for p in migration_paths]

  @staticmethod
- def _resolve_all_migrations(migrations_dir: str):
- """Mark all existing migrations as applied"""
+ def _roll_back_migration(migration_name: str):
+ """Mark a specific migration as rolled back"""
+ subprocess.run(
+ ["prisma", "migrate", "resolve", "--rolled-back", migration_name],
+ timeout=60,
+ check=True,
+ capture_output=True,
+ )
+
+ @staticmethod
+ def _resolve_specific_migration(migration_name: str):
+ """Mark a specific migration as applied"""
+ subprocess.run(
+ ["prisma", "migrate", "resolve", "--applied", migration_name],
+ timeout=60,
+ check=True,
+ capture_output=True,
+ )
+
+ @staticmethod
+ def _resolve_all_migrations(migrations_dir: str, schema_path: str):
+ """
+ 1. Compare the current database state to schema.prisma and generate a migration for the diff.
+ 2. Run prisma migrate deploy to apply any pending migrations.
+ 3. Mark all existing migrations as applied.
+ """
+ database_url = os.getenv("DATABASE_URL")
+ diff_dir = (
+ Path(migrations_dir)
+ / "migrations"
+ / f"{datetime.now().strftime('%Y%m%d%H%M%S')}_baseline_diff"
+ )
+ try:
+ diff_dir.mkdir(parents=True, exist_ok=True)
+ except Exception as e:
+ if "Permission denied" in str(e):
+ logger.warning(
+ f"Permission denied - {e}\nunable to baseline db. Set LITELLM_MIGRATION_DIR environment variable to a writable directory to enable migrations."
+ )
+ return
+ raise e
+ diff_sql_path = diff_dir / "migration.sql"
+
+ # 1. Generate migration SQL for the diff between DB and schema
+ try:
+ logger.info("Generating migration diff between DB and schema.prisma...")
+ with open(diff_sql_path, "w") as f:
+ subprocess.run(
+ [
+ "prisma",
+ "migrate",
+ "diff",
+ "--from-url",
+ database_url,
+ "--to-schema-datamodel",
+ schema_path,
+ "--script",
+ ],
+ check=True,
+ timeout=60,
+ stdout=f,
+ )
+ except subprocess.CalledProcessError as e:
+ logger.warning(f"Failed to generate migration diff: {e.stderr}")
+ except subprocess.TimeoutExpired:
+ logger.warning("Migration diff generation timed out.")
+
+ # check if the migration was created
+ if not diff_sql_path.exists():
+ logger.warning("Migration diff was not created")
+ return
+ logger.info(f"Migration diff created at {diff_sql_path}")
+
+ # 2. Run prisma db execute to apply the migration
+ try:
+ logger.info("Running prisma db execute to apply the migration diff...")
+ result = subprocess.run(
+ [
+ "prisma",
+ "db",
+ "execute",
+ "--file",
+ str(diff_sql_path),
+ "--schema",
+ schema_path,
+ ],
+ timeout=60,
+ check=True,
+ capture_output=True,
+ text=True,
+ )
+ logger.info(f"prisma db execute stdout: {result.stdout}")
+ logger.info("✅ Migration diff applied successfully")
+ except subprocess.CalledProcessError as e:
+ logger.warning(f"Failed to apply migration diff: {e.stderr}")
+ except subprocess.TimeoutExpired:
+ logger.warning("Migration diff application timed out.")
+
+ # 3. Mark all migrations as applied
  migration_names = ProxyExtrasDBManager._get_migration_names(migrations_dir)
  logger.info(f"Resolving {len(migration_names)} migrations")
  for migration_name in migration_names:
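
Step 3 resolves each migration by its directory name (e.g. 20250416115320_add_tag_table_to_db). A standalone sketch of the same discovery, assuming the packaged layout <migrations_dir>/migrations/<name>/migration.sql shown in the RECORD below; the glob pattern here is an assumption, since _get_migration_names is only partially visible in this hunk:

# Sketch only: list migration directory names the way step 3 resolves them.
import glob
from pathlib import Path

def list_migration_names(migrations_dir: str) -> list:
    # Assumed layout: one folder per migration, each holding a migration.sql.
    migration_paths = glob.glob(f"{migrations_dir}/migrations/*/migration.sql")
    return [Path(p).parent.name for p in migration_paths]

print(list_migration_names("litellm_proxy_extras"))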
@@ -105,7 +230,7 @@ class ProxyExtrasDBManager:
  )

  @staticmethod
- def setup_database(schema_path: str, use_migrate: bool = False) -> bool:
+ def setup_database(use_migrate: bool = False) -> bool:
  """
  Set up the database using either prisma migrate or prisma db push
  Uses migrations from litellm-proxy-extras package
@@ -117,6 +242,7 @@ class ProxyExtrasDBManager:
  Returns:
  bool: True if setup was successful, False otherwise
  """
+ schema_path = ProxyExtrasDBManager._get_prisma_dir() + "/schema.prisma"
  use_migrate = str_to_bool(os.getenv("USE_PRISMA_MIGRATE")) or use_migrate
  for attempt in range(4):
  original_dir = os.getcwd()
@@ -141,7 +267,34 @@ class ProxyExtrasDBManager:
  return True
  except subprocess.CalledProcessError as e:
  logger.info(f"prisma db error: {e.stderr}, e: {e.stdout}")
- if (
+ if "P3009" in e.stderr:
+ # Extract the failed migration name from the error message
+ migration_match = re.search(
+ r"`(\d+_.*)` migration", e.stderr
+ )
+ if migration_match:
+ failed_migration = migration_match.group(1)
+ logger.info(
+ f"Found failed migration: {failed_migration}, marking as rolled back"
+ )
+ # Mark the failed migration as rolled back
+ subprocess.run(
+ [
+ "prisma",
+ "migrate",
+ "resolve",
+ "--rolled-back",
+ failed_migration,
+ ],
+ timeout=60,
+ check=True,
+ capture_output=True,
+ text=True,
+ )
+ logger.info(
+ f"✅ Migration {failed_migration} marked as rolled back... retrying"
+ )
+ elif (
  "P3005" in e.stderr
  and "database schema is not empty" in e.stderr
  ):
@@ -152,9 +305,34 @@ class ProxyExtrasDBManager:
  logger.info(
  "Baseline migration created, resolving all migrations"
  )
- ProxyExtrasDBManager._resolve_all_migrations(migrations_dir)
+ ProxyExtrasDBManager._resolve_all_migrations(
+ migrations_dir, schema_path
+ )
  logger.info("✅ All migrations resolved.")
  return True
+ elif (
+ "P3018" in e.stderr
+ ): # PostgreSQL error code for duplicate column
+ logger.info(
+ "Migration already exists, resolving specific migration"
+ )
+ # Extract the migration name from the error message
+ migration_match = re.search(
+ r"Migration name: (\d+_.*)", e.stderr
+ )
+ if migration_match:
+ migration_name = migration_match.group(1)
+ logger.info(f"Rolling back migration {migration_name}")
+ ProxyExtrasDBManager._roll_back_migration(
+ migration_name
+ )
+ logger.info(
+ f"Resolving migration {migration_name} that failed due to existing columns"
+ )
+ ProxyExtrasDBManager._resolve_specific_migration(
+ migration_name
+ )
+ logger.info("✅ Migration resolved.")
  else:
  # Use prisma db push with increased timeout
  subprocess.run(
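
The new recovery branches key off Prisma error codes in stderr (P3009 for a previously failed migration, P3018 for a migration that fails to apply) and pull the migration name out with a regex. A small illustration of the two patterns used above; the stderr strings are invented examples, only the regexes come from the diff:

# Sketch only: the regexes used by the P3009 and P3018 handlers, run against
# made-up stderr text.
import re

p3009_stderr = "The `20250416115320_add_tag_table_to_db` migration started at ... failed"
m = re.search(r"`(\d+_.*)` migration", p3009_stderr)
print(m.group(1) if m else None)  # 20250416115320_add_tag_table_to_db

p3018_stderr = "Migration name: 20250425182129_add_session_id"
m = re.search(r"Migration name: (\d+_.*)", p3018_stderr)
print(m.group(1) if m else None)  # 20250425182129_add_session_id

Per the log messages above, a P3009 only marks the migration as rolled back and lets the retry loop run deploy again, while a P3018 rolls the migration back and then resolves it as applied, since the failure was a change the database already contains.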
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: litellm-proxy-extras
- Version: 0.1.8
+ Version: 0.1.14
  Summary: Additional files for the LiteLLM Proxy. Reduces the size of the main litellm package.
  Author: BerriAI
  Requires-Python: >=3.8, !=2.7.*, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*, !=3.7.*
@@ -0,0 +1,23 @@
+ litellm_proxy_extras/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ litellm_proxy_extras/_logging.py,sha256=7KoWerTOol5IPNyNdbZvLuSlpQbEGE235VgzwpgafKQ,393
+ litellm_proxy_extras/migrations/20250326162113_baseline/migration.sql,sha256=9aRWmBbLf7EWbCMXifDl5zL9bAw0uPXJut1AXNKrSTE,13383
+ litellm_proxy_extras/migrations/20250326171002_add_daily_user_table/migration.sql,sha256=dY-dNCLosWmXNli2B9wqX4hZpp3s0DL3IwEPtTTC134,1179
+ litellm_proxy_extras/migrations/20250327180120_add_api_requests_to_daily_user_table/migration.sql,sha256=or5TaEgH4cHwR5kDvVjZvcAj1OwzTxjqwO-lxXe3FXk,110
+ litellm_proxy_extras/migrations/20250329084805_new_cron_job_table/migration.sql,sha256=eZNDwrzKtWFXkTqOKb9JS4hzum1dI-VTXvMqzhryfxc,404
+ litellm_proxy_extras/migrations/20250331215456_track_success_and_failed_requests_daily_agg_table/migration.sql,sha256=tyeLY6u8KFyw71osCBM-sdjhIgvHoFCK88cIx8dExNY,178
+ litellm_proxy_extras/migrations/20250411215431_add_managed_file_table/migration.sql,sha256=Yu2K37Q90LDhxsFo_64sH0PXdSQ3sHs45Lqzxv2t_20,625
+ litellm_proxy_extras/migrations/20250412081753_team_member_permissions/migration.sql,sha256=v3vDx5lb6SLCzXCe_A2NZj7zzmucRXM08aQun_G_MkE,120
+ litellm_proxy_extras/migrations/20250415151647_add_cache_read_write_tokens_daily_spend_transactions/migration.sql,sha256=pXUhTLpyXLhGaDnT-epbyW-YSdmO1z5_dxOsETD53Os,194
+ litellm_proxy_extras/migrations/20250415191926_add_daily_team_table/migration.sql,sha256=M8DFQOxIPjGcJXIswfHQs2LBTFdJyQBpRbpOT5RIHmw,1330
+ litellm_proxy_extras/migrations/20250416115320_add_tag_table_to_db/migration.sql,sha256=meXcoLzZI8-huUGfeJxB9bcOFLTC83mlnpN5LvYbd7I,1720
+ litellm_proxy_extras/migrations/20250416151339_drop_tag_uniqueness_requirement/migration.sql,sha256=ezPzK5yd-RDziBLt6X-BEoWYmNYHUublg19OobGuuEs,58
+ litellm_proxy_extras/migrations/20250416185146_add_allowed_routes_litellm_verification_token/migration.sql,sha256=XGyDLGakqBt30xuGS-RT5tWhk_Ki7teenxpvKDkIo2E,119
+ litellm_proxy_extras/migrations/20250425182129_add_session_id/migration.sql,sha256=dSjU5QavHMM60siy6f_69DZpFCA4lUfWDL0AUEPEt2g,139
+ litellm_proxy_extras/migrations/20250430193429_add_managed_vector_stores/migration.sql,sha256=SmXedKWYWwYAZjg-usw-905nXxyU9SOws9teFATezUA,479
+ litellm_proxy_extras/migrations/migration_lock.toml,sha256=HbF6jQUaoTYRBzZ1LF4fi37ZK26o6AMRL7viSXBHwhA,24
+ litellm_proxy_extras/schema.prisma,sha256=feIdet6dXSzmxzipML2aoPY1VlPFkxQCA66BOknqcv8,16935
+ litellm_proxy_extras/utils.py,sha256=QImDkrfX_NGCp0tJ4T9IZpmXyzVZqey4jnVVEO0Hcrs,15050
+ litellm_proxy_extras-0.1.14.dist-info/LICENSE,sha256=sXDWv46INd01fgEWgdsCj01R4vsOqJIFj1bgH7ObgnM,1419
+ litellm_proxy_extras-0.1.14.dist-info/METADATA,sha256=R9to4bsFJjyfg4-tKRjo9D6lbnrKG091PSikK20_7Y8,1268
+ litellm_proxy_extras-0.1.14.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+ litellm_proxy_extras-0.1.14.dist-info/RECORD,,
@@ -1,17 +0,0 @@
- litellm_proxy_extras/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- litellm_proxy_extras/_logging.py,sha256=7KoWerTOol5IPNyNdbZvLuSlpQbEGE235VgzwpgafKQ,393
- litellm_proxy_extras/migrations/20250326162113_baseline/migration.sql,sha256=9aRWmBbLf7EWbCMXifDl5zL9bAw0uPXJut1AXNKrSTE,13383
- litellm_proxy_extras/migrations/20250326171002_add_daily_user_table/migration.sql,sha256=dY-dNCLosWmXNli2B9wqX4hZpp3s0DL3IwEPtTTC134,1179
- litellm_proxy_extras/migrations/20250327180120_add_api_requests_to_daily_user_table/migration.sql,sha256=or5TaEgH4cHwR5kDvVjZvcAj1OwzTxjqwO-lxXe3FXk,110
- litellm_proxy_extras/migrations/20250329084805_new_cron_job_table/migration.sql,sha256=eZNDwrzKtWFXkTqOKb9JS4hzum1dI-VTXvMqzhryfxc,404
- litellm_proxy_extras/migrations/20250331215456_track_success_and_failed_requests_daily_agg_table/migration.sql,sha256=tyeLY6u8KFyw71osCBM-sdjhIgvHoFCK88cIx8dExNY,178
- litellm_proxy_extras/migrations/20250411215431_add_managed_file_table/migration.sql,sha256=Yu2K37Q90LDhxsFo_64sH0PXdSQ3sHs45Lqzxv2t_20,625
- litellm_proxy_extras/migrations/20250412081753_team_member_permissions/migration.sql,sha256=v3vDx5lb6SLCzXCe_A2NZj7zzmucRXM08aQun_G_MkE,120
- litellm_proxy_extras/migrations/20250415191926_add_daily_team_table/migration.sql,sha256=M8DFQOxIPjGcJXIswfHQs2LBTFdJyQBpRbpOT5RIHmw,1330
- litellm_proxy_extras/migrations/migration_lock.toml,sha256=HbF6jQUaoTYRBzZ1LF4fi37ZK26o6AMRL7viSXBHwhA,24
- litellm_proxy_extras/schema.prisma,sha256=AJs-oTl488I539rej87DQw73l2x28IhLNGw2gZ6T8AA,14579
- litellm_proxy_extras/utils.py,sha256=2CQEBosLTjXpGEBwx3sGYglSD4QNy609iV4Ppo9AJdo,7087
- litellm_proxy_extras-0.1.8.dist-info/LICENSE,sha256=sXDWv46INd01fgEWgdsCj01R4vsOqJIFj1bgH7ObgnM,1419
- litellm_proxy_extras-0.1.8.dist-info/METADATA,sha256=gnPQXPGAp6L1crea8OX5ucn3kuqPRhjOPaAW-LlxGBU,1267
- litellm_proxy_extras-0.1.8.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
- litellm_proxy_extras-0.1.8.dist-info/RECORD,,