qtype-0.0.14-py3-none-any.whl → qtype-0.0.16-py3-none-any.whl
This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their public registry.
- qtype/interpreter/api.py +2 -0
- qtype/interpreter/steps/llm_inference.py +22 -1
- {qtype-0.0.14.dist-info → qtype-0.0.16.dist-info}/METADATA +1 -1
- {qtype-0.0.14.dist-info → qtype-0.0.16.dist-info}/RECORD +8 -8
- {qtype-0.0.14.dist-info → qtype-0.0.16.dist-info}/WHEEL +0 -0
- {qtype-0.0.14.dist-info → qtype-0.0.16.dist-info}/entry_points.txt +0 -0
- {qtype-0.0.14.dist-info → qtype-0.0.16.dist-info}/licenses/LICENSE +0 -0
- {qtype-0.0.14.dist-info → qtype-0.0.16.dist-info}/top_level.txt +0 -0
qtype/interpreter/api.py
CHANGED
@@ -6,6 +6,7 @@ from pathlib import Path
 import pandas as pd
 from fastapi import FastAPI, HTTPException, Query
 from fastapi.middleware.cors import CORSMiddleware
+from fastapi.responses import RedirectResponse
 from fastapi.staticfiles import StaticFiles
 
 from qtype.dsl.base_types import StepCardinality
@@ -70,6 +71,7 @@ class APIExecutor:
             StaticFiles(directory=str(ui_dir), html=True),
             name="ui",
         )
+        app.get("/")(lambda: RedirectResponse(url="/ui"))
 
         flows = self.definition.flows if self.definition.flows else []
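The two hunks above import RedirectResponse and register a root route so that a request to `/` is redirected to the static UI mounted at `/ui`. A minimal standalone sketch of the same pattern follows; the `static_ui` directory name is an assumption for illustration and is not taken from the qtype source.

```python
# Sketch: serve a prebuilt UI under /ui and redirect the bare root path to it.
# "static_ui" is a placeholder directory; it must exist for StaticFiles to mount.
from fastapi import FastAPI
from fastapi.responses import RedirectResponse
from fastapi.staticfiles import StaticFiles

app = FastAPI()

# Serve the UI's static assets (index.html and friends) at /ui.
app.mount("/ui", StaticFiles(directory="static_ui", html=True), name="ui")

# app.get("/")(...) is the route decorator applied directly to a lambda,
# which keeps the redirect to a single line, as in the diff above.
app.get("/")(lambda: RedirectResponse(url="/ui"))
```

With this in place, a request to `/` returns a 307 redirect to `/ui`, where the static index.html is served.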
qtype/interpreter/steps/llm_inference.py
CHANGED

@@ -1,9 +1,16 @@
 import logging
 from typing import Any, Callable
 
+from llama_cloud import MessageRole as LlamaMessageRole
 from llama_index.core.base.llms.types import ChatResponse, CompletionResponse
 
-from qtype.dsl.
+from qtype.dsl.base_types import PrimitiveTypeEnum
+from qtype.dsl.domain_types import (
+    ChatContent,
+    ChatMessage,
+    Embedding,
+    MessageRole,
+)
 from qtype.interpreter.conversions import (
     from_chat_message,
     to_chat_message,
@@ -96,6 +103,20 @@ def execute(
     history: list[ChatMessage] = conversation_history
     inputs = [to_chat_message(msg) for msg in history] + inputs
 
+    if li.system_message and inputs[0].role != LlamaMessageRole.SYSTEM:
+        # There is a system prompt we should append
+        # Note system_prompt on the llm doesn't work for chat -- is only used for predict https://github.com/run-llama/llama_index/issues/13983
+        system_message = ChatMessage(
+            role=MessageRole.system,
+            blocks=[
+                ChatContent(
+                    type=PrimitiveTypeEnum.text,
+                    content=li.system_message,
+                )
+            ],
+        )
+        inputs = [to_chat_message(system_message)] + inputs
+
     # If the stream function is set, we'll stream the results
     chat_result: ChatResponse
     if stream_fn:
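The second hunk prepends the step's configured system message to the chat inputs when the first message is not already a system message, because LlamaIndex's LLM-level `system_prompt` is only honoured by `predict`, not `chat` (run-llama/llama_index#13983). Below is a simplified, self-contained sketch of that guard using LlamaIndex's own `ChatMessage` type instead of qtype's domain objects; `prepend_system_message` and its string argument are illustrative names, not part of the package.

```python
# Sketch of the guard added in llm_inference.py, reduced to LlamaIndex types.
from llama_index.core.base.llms.types import ChatMessage, MessageRole


def prepend_system_message(
    messages: list[ChatMessage], system_message: str | None
) -> list[ChatMessage]:
    """Prepend a system message unless one already leads the conversation."""
    if system_message and (
        not messages or messages[0].role != MessageRole.SYSTEM
    ):
        # chat() ignores the LLM-level system_prompt, so inject it explicitly.
        return [
            ChatMessage(role=MessageRole.SYSTEM, content=system_message)
        ] + messages
    return messages


# Example: the user turn gains a leading system message.
history = [ChatMessage(role=MessageRole.USER, content="Hello!")]
history = prepend_system_message(history, "You are a helpful assistant.")
```

The qtype version builds the system message from its own `ChatMessage`/`ChatContent` domain types and converts it with `to_chat_message`, so this sketch mirrors only the control flow, not the exact types.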
{qtype-0.0.14.dist-info → qtype-0.0.16.dist-info}/RECORD
CHANGED

@@ -28,7 +28,7 @@ qtype/dsl/domain_types.py,sha256=T0fVhdTyTrdtH4oOYvaRb7fstcReiwTQFCFYbDzPeew,165
 qtype/dsl/model.py,sha256=POUDwNm74dTstcRb5Ens-DSpA8r9e1DtGbTm5KA8X74,31190
 qtype/dsl/validator.py,sha256=j-Jb6uHNF9JwOFs1P-iaMyakhjLR7taJn6HKQuY3WfA,18278
 qtype/interpreter/__init__.py,sha256=IaRF90JLFbsTLKz9LTOMI_Pz4xwVaEyXPNaXV7sLou8,43
-qtype/interpreter/api.py,sha256=
+qtype/interpreter/api.py,sha256=kvd1tOgJGeVE2fXaHzXwPS0YOEt_kFnoVLRF_4QPOl8,7827
 qtype/interpreter/conversions.py,sha256=SFkALpQc2qIApYe7ICuQp6eSuB0YLsE9-gJT_B0e2cU,6072
 qtype/interpreter/exceptions.py,sha256=Il8IF0UAtYWQXwvOVQCY-csfRzC1iOejHM1G-nF5EfY,288
 qtype/interpreter/flow.py,sha256=2u1wRahNFQaRRklnU4uW7_UKSD73-uZe_WiYlKitXQg,1233
@@ -55,7 +55,7 @@ qtype/interpreter/steps/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG
 qtype/interpreter/steps/agent.py,sha256=Yg56YjTlrwc4oJ-f7ZBazF4ii2oJX8zMhETHF0-ArAU,4008
 qtype/interpreter/steps/condition.py,sha256=E86aRWQpjz18QuKwcGyoIPoOKAvTDZgnpg9qXw6xYas,1128
 qtype/interpreter/steps/decoder.py,sha256=toKiZHpM3_y6imV8xQXmAOvGGFhqJzXaWlM1HLIyOzw,2993
-qtype/interpreter/steps/llm_inference.py,sha256=
+qtype/interpreter/steps/llm_inference.py,sha256=jnRqcTx7yIbJ5Sbi1hjeJUGBw9eOHwgUhe-f-XmngyI,6218
 qtype/interpreter/steps/prompt_template.py,sha256=tK1j6DTUMvlDMcJZDGQ2lLgwq_LkaICIq0U5u-QK_WU,1597
 qtype/interpreter/steps/search.py,sha256=wyVFwg5wVXytsm2JyNPwkuBAWpxEunP-dAiqhDZyii4,660
 qtype/interpreter/steps/tool.py,sha256=SNY1SOTpydwo-P-zf1w2EU9tFkXy9YrfjdI9AP2Kbe8,7276
@@ -98,9 +98,9 @@ qtype/semantic/generate.py,sha256=c7yzVmzNyOqWdlbofR1FY8QyeiLYddnIdSsd6VvyY0c,15
 qtype/semantic/model.py,sha256=HoG6rawJ7UuaDbME9QgQke8lOJYUNdawZBBZucNxPbc,16582
 qtype/semantic/resolver.py,sha256=rhePhY1m4h-qYZucIcBcu0DMocjlOs5OVSbhR5HZ2xo,3404
 qtype/semantic/visualize.py,sha256=ZFXBBxqRkX9vXNoCQAReYU7HB3Ecmsm5sQBMEox8ZNU,17444
-qtype-0.0.
-qtype-0.0.
-qtype-0.0.
-qtype-0.0.
-qtype-0.0.
-qtype-0.0.
+qtype-0.0.16.dist-info/licenses/LICENSE,sha256=1KA5EgYBSR0O6nCH2HEvk6Di53YKJ9r_VCR7G8G8qAY,11341
+qtype-0.0.16.dist-info/METADATA,sha256=5Jafu7CEKPiuG0z3CARPv5WI0Gb7r-9IqPPRbOizEIQ,4737
+qtype-0.0.16.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+qtype-0.0.16.dist-info/entry_points.txt,sha256=5y4vj8RLvgl2tXSj-Hm7v5-Tn3kP4-UonjNoN-mfaQE,41
+qtype-0.0.16.dist-info/top_level.txt,sha256=ONroH5B0mZ51jr7NSWCK0weFwwCO7wBLmyVS1YqNU14,6
+qtype-0.0.16.dist-info/RECORD,,
The remaining dist-info files (WHEEL, entry_points.txt, licenses/LICENSE, top_level.txt) are unchanged between the two versions.