llms-py 2.0.22__tar.gz → 2.0.24__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {llms_py-2.0.22/llms_py.egg-info → llms_py-2.0.24}/PKG-INFO +1 -1
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/main.py +1 -1
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/ui/Main.mjs +12 -28
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/ui/ai.mjs +1 -1
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/ui/threadStore.mjs +29 -0
- {llms_py-2.0.22 → llms_py-2.0.24/llms_py.egg-info}/PKG-INFO +1 -1
- {llms_py-2.0.22 → llms_py-2.0.24}/pyproject.toml +1 -1
- {llms_py-2.0.22 → llms_py-2.0.24}/setup.py +1 -1
- {llms_py-2.0.22 → llms_py-2.0.24}/LICENSE +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/MANIFEST.in +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/README.md +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/__init__.py +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/__main__.py +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/index.html +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/llms.json +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/ui/Analytics.mjs +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/ui/App.mjs +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/ui/Avatar.mjs +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/ui/Brand.mjs +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/ui/ChatPrompt.mjs +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/ui/ModelSelector.mjs +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/ui/ProviderIcon.mjs +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/ui/ProviderStatus.mjs +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/ui/Recents.mjs +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/ui/SettingsDialog.mjs +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/ui/Sidebar.mjs +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/ui/SignIn.mjs +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/ui/SystemPromptEditor.mjs +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/ui/SystemPromptSelector.mjs +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/ui/Welcome.mjs +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/ui/app.css +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/ui/fav.svg +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/ui/lib/chart.js +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/ui/lib/charts.mjs +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/ui/lib/color.js +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/ui/lib/highlight.min.mjs +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/ui/lib/idb.min.mjs +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/ui/lib/marked.min.mjs +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/ui/lib/servicestack-client.mjs +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/ui/lib/servicestack-vue.mjs +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/ui/lib/vue-router.min.mjs +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/ui/lib/vue.min.mjs +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/ui/lib/vue.mjs +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/ui/markdown.mjs +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/ui/tailwind.input.css +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/ui/typography.css +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/ui/utils.mjs +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms/ui.json +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms_py.egg-info/SOURCES.txt +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms_py.egg-info/dependency_links.txt +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms_py.egg-info/entry_points.txt +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms_py.egg-info/not-zip-safe +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms_py.egg-info/requires.txt +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/llms_py.egg-info/top_level.txt +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/requirements.txt +0 -0
- {llms_py-2.0.22 → llms_py-2.0.24}/setup.cfg +0 -0

{llms_py-2.0.22/llms_py.egg-info → llms_py-2.0.24}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: llms-py
-Version: 2.0.22
+Version: 2.0.24
 Summary: A lightweight CLI tool and OpenAI-compatible server for querying multiple Large Language Model (LLM) providers
 Home-page: https://github.com/ServiceStack/llms
 Author: ServiceStack

{llms_py-2.0.22 → llms_py-2.0.24}/llms/ui/Main.mjs
@@ -497,7 +497,7 @@ export default {
 
             // Import threads one by one
             let importedCount = 0
-            let updatedCount = 0
+            let existingCount = 0
 
             const db = await threads.initDB()
 
@@ -506,6 +506,8 @@ export default {
                 throw new Error('Invalid import file: missing or invalid threads array')
             }
 
+            const threadIds = new Set(await threads.getAllThreadIds())
+
             for (const threadData of importData.threads) {
                 if (!threadData.id) {
                     console.warn('Skipping thread without ID:', threadData)
@@ -514,32 +516,13 @@ export default {
 
                 try {
                     // Check if thread already exists
-                    const existingThread =
-
+                    const existingThread = threadIds.has(threadData.id)
                     if (existingThread) {
-
-                        await threads.updateThread(threadData.id, {
-                            title: threadData.title,
-                            model: threadData.model,
-                            systemPrompt: threadData.systemPrompt,
-                            messages: threadData.messages || [],
-                            createdAt: threadData.createdAt,
-                            // Keep the existing updatedAt or use imported one
-                            updatedAt: threadData.updatedAt || existingThread.updatedAt
-                        })
-                        updatedCount++
+                        existingCount++
                     } else {
                         // Add new thread directly to IndexedDB
                         const tx = db.transaction(['threads'], 'readwrite')
-                        await tx.objectStore('threads').add({
-                            id: threadData.id,
-                            title: threadData.title || 'Imported Chat',
-                            model: threadData.model || '',
-                            systemPrompt: threadData.systemPrompt || '',
-                            messages: threadData.messages || [],
-                            createdAt: threadData.createdAt || new Date().toISOString(),
-                            updatedAt: threadData.updatedAt || new Date().toISOString()
-                        })
+                        await tx.objectStore('threads').add(threadData)
                         await tx.complete
                         importedCount++
                     }
@@ -551,13 +534,15 @@ export default {
                 // Reload threads to reflect changes
                 await threads.loadThreads()
 
-                alert(`Import completed!\nNew threads: ${importedCount}\
+                alert(`Import completed!\nNew threads: ${importedCount}\nExisting threads: ${existingCount}`)
             }
             if (importData.requests) {
                 if (!Array.isArray(importData.requests)) {
                     throw new Error('Invalid import file: missing or invalid requests array')
                 }
 
+                const requestIds = new Set(await threads.getAllRequestIds())
+
                 for (const requestData of importData.requests) {
                     if (!requestData.id) {
                         console.warn('Skipping request without ID:', requestData)
@@ -566,10 +551,9 @@ export default {
 
                     try {
                         // Check if request already exists
-                        const existingRequest =
-
+                        const existingRequest = requestIds.has(requestData.id)
                         if (existingRequest) {
-
+                            existingCount++
                         } else {
                             // Add new request directly to IndexedDB
                             const db = await threads.initDB()
@@ -583,7 +567,7 @@ export default {
                     }
                 }
 
-                alert(`Import completed!\nNew requests: ${importedCount}\
+                alert(`Import completed!\nNew requests: ${importedCount}\nExisting requests: ${existingCount}`)
             }
 
         } catch (error) {
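
Taken together, the Main.mjs hunks above change the import path from "look up each thread and update it if it exists" to "load every stored ID once, then only add threads that are missing". The sketch below restates the new (+) side of the thread branch as one coherent loop. It is a minimal reading of the diff, not the full component method: the per-thread try/catch and the surrounding importData handling are elided, and the `continue` in the skip branch is assumed since the diff only shows the warning line.

```js
// Minimal sketch of the deduplicating thread import introduced in 2.0.24
// (assumes `threads` is the store from useThreadStore() and `importData`
// is the parsed export file, as in Main.mjs).
let importedCount = 0
let existingCount = 0

const db = await threads.initDB()

// One bulk read of existing IDs instead of a per-thread lookup.
const threadIds = new Set(await threads.getAllThreadIds())

for (const threadData of importData.threads) {
    if (!threadData.id) {
        console.warn('Skipping thread without ID:', threadData)
        continue // assumed; the diff only shows the warning
    }
    if (threadIds.has(threadData.id)) {
        // Existing threads are now left untouched instead of being updated in place.
        existingCount++
    } else {
        // New threads are written straight to IndexedDB, keeping the imported record as-is.
        const tx = db.transaction(['threads'], 'readwrite')
        await tx.objectStore('threads').add(threadData)
        await tx.complete
        importedCount++
    }
}

alert(`Import completed!\nNew threads: ${importedCount}\nExisting threads: ${existingCount}`)
```

The request branch in the same method follows the identical pattern, using `getAllRequestIds()` and the 'requests' object store.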

{llms_py-2.0.22 → llms_py-2.0.24}/llms/ui/threadStore.mjs
@@ -405,6 +405,32 @@ async function getAllRequests() {
     return allRequests
 }
 
+async function getRequest(requestId) {
+    await initDB()
+
+    const tx = db.transaction(['requests'], 'readonly')
+    const store = tx.objectStore('requests')
+    const request = await store.get(requestId)
+    return request
+}
+
+async function getAllRequestIds() {
+    await initDB()
+
+    const tx = db.transaction(['requests'], 'readonly')
+    const store = tx.objectStore('requests')
+    const ids = await store.getAllKeys()
+    return ids
+}
+
+async function getAllThreadIds() {
+    await initDB()
+    const tx = db.transaction(['threads'], 'readonly')
+    const store = tx.objectStore('threads')
+    const ids = await store.getAllKeys()
+    return ids
+}
+
 // Query requests with pagination and filtering
 async function getRequests(filters = {}, limit = 20, offset = 0) {
     try {
@@ -526,9 +552,12 @@ export function useThreadStore() {
         setCurrentThreadFromRoute,
         clearCurrentThread,
         getGroupedThreads,
+        getRequest,
         getRequests,
         getAllRequests,
         getFilterOptions,
         deleteRequest,
+        getAllRequestIds,
+        getAllThreadIds,
     }
 }
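
threadStore.mjs now exposes the three helpers through `useThreadStore()`, so UI code can check for existing records by key without loading whole thread or request objects (IndexedDB's `getAllKeys()` returns only the object store's primary keys). A rough usage sketch follows; the import specifier and the top-level-await context are assumptions, not taken from the diff.

```js
// Hypothetical consumer of the store helpers added in 2.0.24.
import { useThreadStore } from './threadStore.mjs' // assumed path

const threads = useThreadStore()

// Cheap existence checks: only keys are read from IndexedDB.
const threadIds = new Set(await threads.getAllThreadIds())
const requestIds = new Set(await threads.getAllRequestIds())

console.log(`stored: ${threadIds.size} threads, ${requestIds.size} requests`)

// getRequest(id) fetches a single stored request record by its key.
if (requestIds.size > 0) {
    const [firstId] = requestIds
    const request = await threads.getRequest(firstId)
    console.log('example request record:', request)
}
```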

{llms_py-2.0.22 → llms_py-2.0.24/llms_py.egg-info}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: llms-py
-Version: 2.0.22
+Version: 2.0.24
 Summary: A lightweight CLI tool and OpenAI-compatible server for querying multiple Large Language Model (LLM) providers
 Home-page: https://github.com/ServiceStack/llms
 Author: ServiceStack

{llms_py-2.0.22 → llms_py-2.0.24}/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "llms-py"
-version = "2.0.22"
+version = "2.0.24"
 description = "A lightweight CLI tool and OpenAI-compatible server for querying multiple Large Language Model (LLM) providers"
 readme = "README.md"
 license = "BSD-3-Clause"

{llms_py-2.0.22 → llms_py-2.0.24}/setup.py
@@ -16,7 +16,7 @@ with open(os.path.join(this_directory, "requirements.txt"), encoding="utf-8") as
 
 setup(
     name="llms-py",
-    version="2.0.22",
+    version="2.0.24",
     author="ServiceStack",
     author_email="team@servicestack.net",
     description="A lightweight CLI tool and OpenAI-compatible server for querying multiple Large Language Model (LLM) providers",