trustgraph-embeddings-hf 0.22.10__tar.gz → 0.23.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {trustgraph-embeddings-hf-0.22.10 → trustgraph-embeddings-hf-0.23.0}/PKG-INFO +2 -2
- trustgraph-embeddings-hf-0.23.0/trustgraph/embeddings/hf/hf.py +48 -0
- trustgraph-embeddings-hf-0.23.0/trustgraph/embeddings_hf_version.py +1 -0
- {trustgraph-embeddings-hf-0.22.10 → trustgraph-embeddings-hf-0.23.0}/trustgraph_embeddings_hf.egg-info/PKG-INFO +2 -2
- trustgraph-embeddings-hf-0.22.10/trustgraph/embeddings/hf/hf.py +0 -100
- trustgraph-embeddings-hf-0.22.10/trustgraph/embeddings_hf_version.py +0 -1
- {trustgraph-embeddings-hf-0.22.10 → trustgraph-embeddings-hf-0.23.0}/README.md +0 -0
- {trustgraph-embeddings-hf-0.22.10 → trustgraph-embeddings-hf-0.23.0}/scripts/embeddings-hf +0 -0
- {trustgraph-embeddings-hf-0.22.10 → trustgraph-embeddings-hf-0.23.0}/setup.cfg +0 -0
- {trustgraph-embeddings-hf-0.22.10 → trustgraph-embeddings-hf-0.23.0}/setup.py +0 -0
- {trustgraph-embeddings-hf-0.22.10 → trustgraph-embeddings-hf-0.23.0}/trustgraph/__init__.py +0 -0
- {trustgraph-embeddings-hf-0.22.10 → trustgraph-embeddings-hf-0.23.0}/trustgraph/embeddings/__init__.py +0 -0
- {trustgraph-embeddings-hf-0.22.10 → trustgraph-embeddings-hf-0.23.0}/trustgraph/embeddings/hf/__init__.py +0 -0
- {trustgraph-embeddings-hf-0.22.10 → trustgraph-embeddings-hf-0.23.0}/trustgraph/embeddings/hf/__main__.py +0 -0
- {trustgraph-embeddings-hf-0.22.10 → trustgraph-embeddings-hf-0.23.0}/trustgraph_embeddings_hf.egg-info/SOURCES.txt +0 -0
- {trustgraph-embeddings-hf-0.22.10 → trustgraph-embeddings-hf-0.23.0}/trustgraph_embeddings_hf.egg-info/dependency_links.txt +0 -0
- {trustgraph-embeddings-hf-0.22.10 → trustgraph-embeddings-hf-0.23.0}/trustgraph_embeddings_hf.egg-info/requires.txt +0 -0
- {trustgraph-embeddings-hf-0.22.10 → trustgraph-embeddings-hf-0.23.0}/trustgraph_embeddings_hf.egg-info/top_level.txt +0 -0
@@ -1,9 +1,9 @@
|
|
1
1
|
Metadata-Version: 2.1
|
2
2
|
Name: trustgraph-embeddings-hf
|
3
|
-
Version: 0.
|
3
|
+
Version: 0.23.0
|
4
4
|
Summary: HuggingFace embeddings support for TrustGraph.
|
5
5
|
Home-page: https://github.com/trustgraph-ai/trustgraph
|
6
|
-
Download-URL: https://github.com/trustgraph-ai/trustgraph/archive/refs/tags/v0.
|
6
|
+
Download-URL: https://github.com/trustgraph-ai/trustgraph/archive/refs/tags/v0.23.0.tar.gz
|
7
7
|
Author: trustgraph.ai
|
8
8
|
Author-email: security@trustgraph.ai
|
9
9
|
Classifier: Programming Language :: Python :: 3
|
@@ -0,0 +1,48 @@
|
|
1
|
+
|
2
|
+
"""
|
3
|
+
Embeddings service, applies an embeddings model selected from HuggingFace.
|
4
|
+
Input is text, output is embeddings vector.
|
5
|
+
"""
|
6
|
+
|
7
|
+
from ... base import EmbeddingsService
|
8
|
+
|
9
|
+
from langchain_huggingface import HuggingFaceEmbeddings
|
10
|
+
|
11
|
+
default_ident = "embeddings"
|
12
|
+
|
13
|
+
default_model="all-MiniLM-L6-v2"
|
14
|
+
|
15
|
+
class Processor(EmbeddingsService):
    """
    Embeddings processor: wraps a HuggingFace sentence-embeddings model
    behind the TrustGraph EmbeddingsService interface.  Input is text,
    output is an embeddings vector per input document.
    """

    def __init__(self, **params):
        """
        params: service configuration.  Recognises "model" (HuggingFace
        model name, default: all-MiniLM-L6-v2); all params, including the
        resolved model name, are forwarded to EmbeddingsService.
        """

        model = params.get("model", default_model)

        # Pass the resolved model name down so the base class sees the
        # effective configuration, not just what the caller supplied.
        super().__init__(
            **params | { "model": model }
        )

        print("Get model...", flush=True)
        # May download the model on first use.
        self.embeddings = HuggingFaceEmbeddings(model_name=model)

    async def on_embeddings(self, text):
        """
        Compute embeddings for a single text string.  Returns a list of
        vectors (embed_documents is called with a one-element batch).
        """

        embeds = self.embeddings.embed_documents([text])
        print("Done.", flush=True)
        return embeds

    @staticmethod
    def add_args(parser):
        """Register this service's command-line arguments on parser."""

        EmbeddingsService.add_args(parser)

        parser.add_argument(
            '-m', '--model',
            # Single source of truth for the default model name: the
            # module-level default_model constant (was hard-coded here,
            # duplicating the constant; help text also wrongly said
            # "LLM model" and used an f-string with no placeholder).
            default=default_model,
            help=f'Embeddings model (default: {default_model})'
        )
|
44
|
+
|
45
|
+
def run():
    """Entry point: launch the embeddings service under its default ident."""
    Processor.launch(default_ident, __doc__)
|
48
|
+
|
@@ -0,0 +1 @@
|
|
1
|
+
__version__ = "0.23.0"
|
@@ -1,9 +1,9 @@
|
|
1
1
|
Metadata-Version: 2.1
|
2
2
|
Name: trustgraph-embeddings-hf
|
3
|
-
Version: 0.
|
3
|
+
Version: 0.23.0
|
4
4
|
Summary: HuggingFace embeddings support for TrustGraph.
|
5
5
|
Home-page: https://github.com/trustgraph-ai/trustgraph
|
6
|
-
Download-URL: https://github.com/trustgraph-ai/trustgraph/archive/refs/tags/v0.
|
6
|
+
Download-URL: https://github.com/trustgraph-ai/trustgraph/archive/refs/tags/v0.23.0.tar.gz
|
7
7
|
Author: trustgraph.ai
|
8
8
|
Author-email: security@trustgraph.ai
|
9
9
|
Classifier: Programming Language :: Python :: 3
|
@@ -1,100 +0,0 @@
|
|
1
|
-
|
2
|
-
"""
|
3
|
-
Embeddings service, applies an embeddings model selected from HuggingFace.
|
4
|
-
Input is text, output is embeddings vector.
|
5
|
-
"""
|
6
|
-
|
7
|
-
from langchain_huggingface import HuggingFaceEmbeddings
|
8
|
-
|
9
|
-
from trustgraph.schema import EmbeddingsRequest, EmbeddingsResponse, Error
|
10
|
-
from trustgraph.schema import embeddings_request_queue
|
11
|
-
from trustgraph.schema import embeddings_response_queue
|
12
|
-
from trustgraph.log_level import LogLevel
|
13
|
-
from trustgraph.base import ConsumerProducer
|
14
|
-
|
15
|
-
module = ".".join(__name__.split(".")[1:-1])
|
16
|
-
|
17
|
-
default_input_queue = embeddings_request_queue
|
18
|
-
default_output_queue = embeddings_response_queue
|
19
|
-
default_subscriber = module
|
20
|
-
default_model="all-MiniLM-L6-v2"
|
21
|
-
|
22
|
-
class Processor(ConsumerProducer):
|
23
|
-
|
24
|
-
def __init__(self, **params):
|
25
|
-
|
26
|
-
input_queue = params.get("input_queue", default_input_queue)
|
27
|
-
output_queue = params.get("output_queue", default_output_queue)
|
28
|
-
subscriber = params.get("subscriber", default_subscriber)
|
29
|
-
model = params.get("model", default_model)
|
30
|
-
|
31
|
-
super(Processor, self).__init__(
|
32
|
-
**params | {
|
33
|
-
"input_queue": input_queue,
|
34
|
-
"output_queue": output_queue,
|
35
|
-
"subscriber": subscriber,
|
36
|
-
"input_schema": EmbeddingsRequest,
|
37
|
-
"output_schema": EmbeddingsResponse,
|
38
|
-
}
|
39
|
-
)
|
40
|
-
|
41
|
-
self.embeddings = HuggingFaceEmbeddings(model_name=model)
|
42
|
-
|
43
|
-
async def handle(self, msg):
|
44
|
-
|
45
|
-
v = msg.value()
|
46
|
-
|
47
|
-
# Sender-produced ID
|
48
|
-
id = msg.properties()["id"]
|
49
|
-
|
50
|
-
print(f"Handling input {id}...", flush=True)
|
51
|
-
|
52
|
-
try:
|
53
|
-
|
54
|
-
text = v.text
|
55
|
-
embeds = self.embeddings.embed_documents([text])
|
56
|
-
|
57
|
-
print("Send response...", flush=True)
|
58
|
-
r = EmbeddingsResponse(vectors=embeds, error=None)
|
59
|
-
await self.send(r, properties={"id": id})
|
60
|
-
|
61
|
-
print("Done.", flush=True)
|
62
|
-
|
63
|
-
|
64
|
-
except Exception as e:
|
65
|
-
|
66
|
-
print(f"Exception: {e}")
|
67
|
-
|
68
|
-
print("Send error response...", flush=True)
|
69
|
-
|
70
|
-
r = EmbeddingsResponse(
|
71
|
-
error=Error(
|
72
|
-
type = "llm-error",
|
73
|
-
message = str(e),
|
74
|
-
),
|
75
|
-
response=None,
|
76
|
-
)
|
77
|
-
|
78
|
-
await self.send(r, properties={"id": id})
|
79
|
-
|
80
|
-
self.consumer.acknowledge(msg)
|
81
|
-
|
82
|
-
|
83
|
-
@staticmethod
|
84
|
-
def add_args(parser):
|
85
|
-
|
86
|
-
ConsumerProducer.add_args(
|
87
|
-
parser, default_input_queue, default_subscriber,
|
88
|
-
default_output_queue,
|
89
|
-
)
|
90
|
-
|
91
|
-
parser.add_argument(
|
92
|
-
'-m', '--model',
|
93
|
-
default="all-MiniLM-L6-v2",
|
94
|
-
help=f'LLM model (default: all-MiniLM-L6-v2)'
|
95
|
-
)
|
96
|
-
|
97
|
-
def run():
|
98
|
-
|
99
|
-
Processor.launch(module, __doc__)
|
100
|
-
|
@@ -1 +0,0 @@
|
|
1
|
-
__version__ = "0.22.10"
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|