graphiti-core 0.22.0rc2__py3-none-any.whl → 0.22.0rc4__py3-none-any.whl
This diff shows the publicly released contents of the two package versions as they appear in their public registry, and is provided for informational purposes only.
- graphiti_core/graphiti.py +459 -326
- graphiti_core/graphiti_types.py +2 -0
- graphiti_core/llm_client/anthropic_client.py +63 -46
- graphiti_core/llm_client/client.py +60 -17
- graphiti_core/llm_client/gemini_client.py +69 -52
- graphiti_core/llm_client/openai_base_client.py +58 -41
- graphiti_core/llm_client/openai_generic_client.py +58 -41
- graphiti_core/prompts/extract_nodes.py +13 -13
- graphiti_core/tracer.py +193 -0
- graphiti_core/utils/maintenance/community_operations.py +4 -1
- graphiti_core/utils/maintenance/edge_operations.py +4 -0
- graphiti_core/utils/maintenance/node_operations.py +10 -2
- graphiti_core/utils/maintenance/temporal_operations.py +4 -1
- graphiti_core/utils/text_utils.py +53 -0
- {graphiti_core-0.22.0rc2.dist-info → graphiti_core-0.22.0rc4.dist-info}/METADATA +5 -1
- {graphiti_core-0.22.0rc2.dist-info → graphiti_core-0.22.0rc4.dist-info}/RECORD +18 -16
- {graphiti_core-0.22.0rc2.dist-info → graphiti_core-0.22.0rc4.dist-info}/WHEEL +0 -0
- {graphiti_core-0.22.0rc2.dist-info → graphiti_core-0.22.0rc4.dist-info}/licenses/LICENSE +0 -0
graphiti_core/prompts/extract_nodes.py

@@ -18,6 +18,8 @@ from typing import Any, Protocol, TypedDict
 
 from pydantic import BaseModel, Field
 
+from graphiti_core.utils.text_utils import MAX_SUMMARY_CHARS
+
 from .models import Message, PromptFunction, PromptVersion
 from .prompt_helpers import to_prompt_json
 from .snippets import summary_instructions
@@ -57,7 +59,7 @@ class EntityClassification(BaseModel):
 class EntitySummary(BaseModel):
     summary: str = Field(
         ...,
-        description='Summary containing the important information about the entity. Under
+        description=f'Summary containing the important information about the entity. Under {MAX_SUMMARY_CHARS} characters.',
     )
 
 
@@ -259,18 +261,17 @@ def extract_attributes(context: dict[str, Any]) -> list[Message]:
         Message(
             role='user',
             content=f"""
-
-<MESSAGES>
-{to_prompt_json(context['previous_episodes'], indent=2)}
-{to_prompt_json(context['episode_content'], indent=2)}
-</MESSAGES>
-
-Given the above MESSAGES and the following ENTITY, update any of its attributes based on the information provided
+Given the MESSAGES and the following ENTITY, update any of its attributes based on the information provided
 in MESSAGES. Use the provided attribute descriptions to better understand how each attribute should be determined.
 
 Guidelines:
 1. Do not hallucinate entity property values if they cannot be found in the current context.
 2. Only use the provided MESSAGES and ENTITY to set attribute values.
+
+<MESSAGES>
+{to_prompt_json(context['previous_episodes'], indent=2)}
+{to_prompt_json(context['episode_content'], indent=2)}
+</MESSAGES>
 
 <ENTITY>
 {context['node']}
@@ -289,17 +290,16 @@ def extract_summary(context: dict[str, Any]) -> list[Message]:
         Message(
             role='user',
             content=f"""
+Given the MESSAGES and the ENTITY, update the summary that combines relevant information about the entity
+from the messages and relevant information from the existing summary.
+
+{summary_instructions}
 
 <MESSAGES>
 {to_prompt_json(context['previous_episodes'], indent=2)}
 {to_prompt_json(context['episode_content'], indent=2)}
 </MESSAGES>
 
-Given the above MESSAGES and the following ENTITY, update the summary that combines relevant information about the entity
-from the messages and relevant information from the existing summary.
-
-{summary_instructions}
-
 <ENTITY>
 {context['node']}
 </ENTITY>
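
The EntitySummary change above replaces the previous literal description text with an f-string that interpolates the shared MAX_SUMMARY_CHARS constant. A minimal standalone sketch (assuming Pydantic v2; not the packaged module) of how the interpolated description behaves:

# Sketch only: mirrors the rc4 EntitySummary change with the constant inlined.
from pydantic import BaseModel, Field

MAX_SUMMARY_CHARS = 250  # value defined in graphiti_core/utils/text_utils.py in rc4


class EntitySummary(BaseModel):
    summary: str = Field(
        ...,
        # The f-string is evaluated once, at class-definition time, so the schema
        # description the LLM sees ends with "Under 250 characters."
        description=f'Summary containing the important information about the entity. Under {MAX_SUMMARY_CHARS} characters.',
    )


print(EntitySummary.model_fields['summary'].description)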
graphiti_core/tracer.py
ADDED
@@ -0,0 +1,193 @@
+"""
+Copyright 2024, Zep Software, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+from abc import ABC, abstractmethod
+from collections.abc import Generator
+from contextlib import AbstractContextManager, contextmanager, suppress
+from typing import TYPE_CHECKING, Any
+
+if TYPE_CHECKING:
+    from opentelemetry.trace import Span, StatusCode
+
+try:
+    from opentelemetry.trace import Span, StatusCode
+
+    OTEL_AVAILABLE = True
+except ImportError:
+    OTEL_AVAILABLE = False
+
+
+class TracerSpan(ABC):
+    """Abstract base class for tracer spans."""
+
+    @abstractmethod
+    def add_attributes(self, attributes: dict[str, Any]) -> None:
+        """Add attributes to the span."""
+        pass
+
+    @abstractmethod
+    def set_status(self, status: str, description: str | None = None) -> None:
+        """Set the status of the span."""
+        pass
+
+    @abstractmethod
+    def record_exception(self, exception: Exception) -> None:
+        """Record an exception in the span."""
+        pass
+
+
+class Tracer(ABC):
+    """Abstract base class for tracers."""
+
+    @abstractmethod
+    def start_span(self, name: str) -> AbstractContextManager[TracerSpan]:
+        """Start a new span with the given name."""
+        pass
+
+
+class NoOpSpan(TracerSpan):
+    """No-op span implementation that does nothing."""
+
+    def add_attributes(self, attributes: dict[str, Any]) -> None:
+        pass
+
+    def set_status(self, status: str, description: str | None = None) -> None:
+        pass
+
+    def record_exception(self, exception: Exception) -> None:
+        pass
+
+
+class NoOpTracer(Tracer):
+    """No-op tracer implementation that does nothing."""
+
+    @contextmanager
+    def start_span(self, name: str) -> Generator[NoOpSpan, None, None]:
+        """Return a no-op span."""
+        yield NoOpSpan()
+
+
+class OpenTelemetrySpan(TracerSpan):
+    """Wrapper for OpenTelemetry span."""
+
+    def __init__(self, span: 'Span'):
+        self._span = span
+
+    def add_attributes(self, attributes: dict[str, Any]) -> None:
+        """Add attributes to the OpenTelemetry span."""
+        try:
+            # Filter out None values and convert all values to appropriate types
+            filtered_attrs = {}
+            for key, value in attributes.items():
+                if value is not None:
+                    # Convert to string if not a primitive type
+                    if isinstance(value, str | int | float | bool):
+                        filtered_attrs[key] = value
+                    else:
+                        filtered_attrs[key] = str(value)
+
+            if filtered_attrs:
+                self._span.set_attributes(filtered_attrs)
+        except Exception:
+            # Silently ignore tracing errors
+            pass
+
+    def set_status(self, status: str, description: str | None = None) -> None:
+        """Set the status of the OpenTelemetry span."""
+        try:
+            if OTEL_AVAILABLE:
+                if status == 'error':
+                    self._span.set_status(StatusCode.ERROR, description)
+                elif status == 'ok':
+                    self._span.set_status(StatusCode.OK, description)
+        except Exception:
+            # Silently ignore tracing errors
+            pass
+
+    def record_exception(self, exception: Exception) -> None:
+        """Record an exception in the OpenTelemetry span."""
+        with suppress(Exception):
+            self._span.record_exception(exception)
+
+
+class OpenTelemetryTracer(Tracer):
+    """Wrapper for OpenTelemetry tracer with configurable span name prefix."""
+
+    def __init__(self, tracer: Any, span_prefix: str = 'graphiti'):
+        """
+        Initialize the OpenTelemetry tracer wrapper.
+
+        Parameters
+        ----------
+        tracer : opentelemetry.trace.Tracer
+            The OpenTelemetry tracer instance.
+        span_prefix : str, optional
+            Prefix to prepend to all span names. Defaults to 'graphiti'.
+        """
+        if not OTEL_AVAILABLE:
+            raise ImportError(
+                'OpenTelemetry is not installed. Install it with: pip install opentelemetry-api'
+            )
+        self._tracer = tracer
+        self._span_prefix = span_prefix.rstrip('.')
+
+    @contextmanager
+    def start_span(self, name: str) -> Generator[OpenTelemetrySpan | NoOpSpan, None, None]:
+        """Start a new OpenTelemetry span with the configured prefix."""
+        try:
+            full_name = f'{self._span_prefix}.{name}'
+            with self._tracer.start_as_current_span(full_name) as span:
+                yield OpenTelemetrySpan(span)
+        except Exception:
+            # If tracing fails, yield a no-op span to prevent breaking the operation
+            yield NoOpSpan()
+
+
+def create_tracer(otel_tracer: Any | None = None, span_prefix: str = 'graphiti') -> Tracer:
+    """
+    Create a tracer instance.
+
+    Parameters
+    ----------
+    otel_tracer : opentelemetry.trace.Tracer | None, optional
+        An OpenTelemetry tracer instance. If None, a no-op tracer is returned.
+    span_prefix : str, optional
+        Prefix to prepend to all span names. Defaults to 'graphiti'.
+
+    Returns
+    -------
+    Tracer
+        A tracer instance (either OpenTelemetryTracer or NoOpTracer).
+
+    Examples
+    --------
+    Using with OpenTelemetry:
+
+    >>> from opentelemetry import trace
+    >>> otel_tracer = trace.get_tracer(__name__)
+    >>> tracer = create_tracer(otel_tracer, span_prefix='myapp.graphiti')
+
+    Using no-op tracer:
+
+    >>> tracer = create_tracer()  # Returns NoOpTracer
+    """
+    if otel_tracer is None:
+        return NoOpTracer()
+
+    if not OTEL_AVAILABLE:
+        return NoOpTracer()
+
+    return OpenTelemetryTracer(otel_tracer, span_prefix)
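
The module's create_tracer docstring above shows the intended wiring. A short usage sketch, assuming an OpenTelemetry SDK is configured and using only the APIs defined in this file (create_tracer, start_span, add_attributes, set_status, record_exception); the span name and attribute key are illustrative, not taken from the library:

# Sketch of intended usage based on the docstrings above.
from opentelemetry import trace

from graphiti_core.tracer import create_tracer

otel_tracer = trace.get_tracer(__name__)
tracer = create_tracer(otel_tracer, span_prefix='myapp.graphiti')  # create_tracer() alone returns a NoOpTracer

with tracer.start_span('llm.generate_response') as span:
    span.add_attributes({'prompt_name': 'extract_nodes.extract_text'})
    try:
        ...  # the traced operation goes here
        span.set_status('ok')
    except Exception as exc:
        span.record_exception(exc)
        span.set_status('error', str(exc))
        raise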

graphiti_core/utils/maintenance/community_operations.py

@@ -138,7 +138,9 @@ async def summarize_pair(llm_client: LLMClient, summary_pair: tuple[str, str]) -
     }
 
     llm_response = await llm_client.generate_response(
-        prompt_library.summarize_nodes.summarize_pair(context),
+        prompt_library.summarize_nodes.summarize_pair(context),
+        response_model=Summary,
+        prompt_name='summarize_nodes.summarize_pair',
     )
 
     pair_summary = llm_response.get('summary', '')
@@ -154,6 +156,7 @@ async def generate_summary_description(llm_client: LLMClient, summary: str) -> s
     llm_response = await llm_client.generate_response(
         prompt_library.summarize_nodes.summary_description(context),
         response_model=SummaryDescription,
+        prompt_name='summarize_nodes.summary_description',
     )
 
     description = llm_response.get('description', '')

graphiti_core/utils/maintenance/edge_operations.py

@@ -140,6 +140,7 @@ async def extract_edges(
         response_model=ExtractedEdges,
         max_tokens=extract_edges_max_tokens,
         group_id=group_id,
+        prompt_name='extract_edges.edge',
     )
     edges_data = ExtractedEdges(**llm_response).edges
 
@@ -152,6 +153,7 @@ async def extract_edges(
         response_model=MissingFacts,
         max_tokens=extract_edges_max_tokens,
         group_id=group_id,
+        prompt_name='extract_edges.reflexion',
     )
 
     missing_facts = reflexion_response.get('missing_facts', [])
@@ -526,6 +528,7 @@ async def resolve_extracted_edge(
         prompt_library.dedupe_edges.resolve_edge(context),
         response_model=EdgeDuplicate,
         model_size=ModelSize.small,
+        prompt_name='dedupe_edges.resolve_edge',
     )
     response_object = EdgeDuplicate(**llm_response)
     duplicate_facts = response_object.duplicate_facts
@@ -589,6 +592,7 @@ async def resolve_extracted_edge(
         prompt_library.extract_edges.extract_attributes(edge_attributes_context),
         response_model=edge_model,  # type: ignore
         model_size=ModelSize.small,
+        prompt_name='extract_edges.extract_attributes',
     )
 
     resolved_edge.attributes = edge_attributes_response

graphiti_core/utils/maintenance/node_operations.py

@@ -53,6 +53,7 @@ from graphiti_core.utils.maintenance.dedup_helpers import (
 from graphiti_core.utils.maintenance.edge_operations import (
     filter_existing_duplicate_of_edges,
 )
+from graphiti_core.utils.text_utils import MAX_SUMMARY_CHARS, truncate_at_sentence
 
 logger = logging.getLogger(__name__)
 
@@ -77,6 +78,7 @@ async def extract_nodes_reflexion(
         prompt_library.extract_nodes.reflexion(context),
         MissedEntities,
         group_id=group_id,
+        prompt_name='extract_nodes.reflexion',
     )
     missed_entities = llm_response.get('missed_entities', [])
 
@@ -133,18 +135,21 @@ async def extract_nodes(
             prompt_library.extract_nodes.extract_message(context),
             response_model=ExtractedEntities,
             group_id=episode.group_id,
+            prompt_name='extract_nodes.extract_message',
         )
     elif episode.source == EpisodeType.text:
         llm_response = await llm_client.generate_response(
             prompt_library.extract_nodes.extract_text(context),
             response_model=ExtractedEntities,
             group_id=episode.group_id,
+            prompt_name='extract_nodes.extract_text',
         )
     elif episode.source == EpisodeType.json:
         llm_response = await llm_client.generate_response(
             prompt_library.extract_nodes.extract_json(context),
             response_model=ExtractedEntities,
             group_id=episode.group_id,
+            prompt_name='extract_nodes.extract_json',
         )
 
     response_object = ExtractedEntities(**llm_response)
@@ -317,6 +322,7 @@ async def _resolve_with_llm(
     llm_response = await llm_client.generate_response(
         prompt_library.dedupe_nodes.nodes(context),
         response_model=NodeResolutions,
+        prompt_name='dedupe_nodes.nodes',
     )
 
     node_resolutions: list[NodeDuplicate] = NodeResolutions(**llm_response).entity_resolutions
@@ -526,6 +532,7 @@ async def _extract_entity_attributes(
         response_model=entity_type,
         model_size=ModelSize.small,
         group_id=node.group_id,
+        prompt_name='extract_nodes.extract_attributes',
     )
 
     # validate response
@@ -547,7 +554,7 @@ async def _extract_entity_summary(
     summary_context = _build_episode_context(
         node_data={
             'name': node.name,
-            'summary': node.summary,
+            'summary': truncate_at_sentence(node.summary, MAX_SUMMARY_CHARS),
             'entity_types': node.labels,
             'attributes': node.attributes,
         },
@@ -560,9 +567,10 @@ async def _extract_entity_summary(
         response_model=EntitySummary,
         model_size=ModelSize.small,
         group_id=node.group_id,
+        prompt_name='extract_nodes.extract_summary',
     )
 
-    node.summary = summary_response.get('summary', '')
+    node.summary = truncate_at_sentence(summary_response.get('summary', ''), MAX_SUMMARY_CHARS)
 
 
 def _build_episode_context(

graphiti_core/utils/maintenance/temporal_operations.py

@@ -43,7 +43,9 @@ async def extract_edge_dates(
         'reference_timestamp': current_episode.valid_at.isoformat(),
     }
     llm_response = await llm_client.generate_response(
-        prompt_library.extract_edge_dates.v1(context),
+        prompt_library.extract_edge_dates.v1(context),
+        response_model=EdgeDates,
+        prompt_name='extract_edge_dates.v1',
     )
 
     valid_at = llm_response.get('valid_at')
@@ -90,6 +92,7 @@ async def get_edge_contradictions(
         prompt_library.invalidate_edges.v2(context),
         response_model=InvalidatedEdges,
         model_size=ModelSize.small,
+        prompt_name='invalidate_edges.v2',
     )
 
     contradicted_facts: list[int] = llm_response.get('contradicted_facts', [])
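
Across the maintenance modules above, each generate_response call now passes a prompt_name. This diff does not show how the LLM clients consume that argument, so the following is only a hypothetical sketch of the general pattern, combining the new keyword with the tracer API added in graphiti_core/tracer.py; the wrapper function, span name, and attribute key are invented for illustration:

# Hypothetical helper, not part of graphiti-core: tag an LLM call's span with its prompt name.
from typing import Any

from graphiti_core.tracer import Tracer, create_tracer


async def traced_generate_response(
    llm_client: Any,
    messages: Any,
    tracer: Tracer | None = None,
    prompt_name: str | None = None,
    **kwargs: Any,
) -> dict[str, Any]:
    tracer = tracer or create_tracer()  # no-op unless an OpenTelemetry tracer is supplied
    with tracer.start_span('llm.generate_response') as span:
        span.add_attributes({'prompt_name': prompt_name or 'unknown'})
        try:
            response = await llm_client.generate_response(messages, prompt_name=prompt_name, **kwargs)
            span.set_status('ok')
            return response
        except Exception as exc:
            span.record_exception(exc)
            span.set_status('error', str(exc))
            raise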

graphiti_core/utils/text_utils.py
ADDED

@@ -0,0 +1,53 @@
+"""
+Copyright 2024, Zep Software, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import re
+
+# Maximum length for entity/node summaries
+MAX_SUMMARY_CHARS = 250
+
+
+def truncate_at_sentence(text: str, max_chars: int) -> str:
+    """
+    Truncate text at or about max_chars while respecting sentence boundaries.
+
+    Attempts to truncate at the last complete sentence before max_chars.
+    If no sentence boundary is found before max_chars, truncates at max_chars.
+
+    Args:
+        text: The text to truncate
+        max_chars: Maximum number of characters
+
+    Returns:
+        Truncated text
+    """
+    if not text or len(text) <= max_chars:
+        return text
+
+    # Find all sentence boundaries (., !, ?) up to max_chars
+    truncated = text[:max_chars]
+
+    # Look for sentence boundaries: period, exclamation, or question mark followed by space or end
+    sentence_pattern = r'[.!?](?:\s|$)'
+    matches = list(re.finditer(sentence_pattern, truncated))
+
+    if matches:
+        # Truncate at the last sentence boundary found
+        last_match = matches[-1]
+        return text[: last_match.end()].rstrip()
+
+    # No sentence boundary found, truncate at max_chars
+    return truncated.rstrip()
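
A quick usage sketch of the new helper (the input text is chosen for illustration):

# Illustrative only: exercise truncate_at_sentence with a toy summary.
from graphiti_core.utils.text_utils import MAX_SUMMARY_CHARS, truncate_at_sentence

summary = 'Alice founded Acme in 2019. She later moved to Berlin. ' * 10  # ~550 chars

short = truncate_at_sentence(summary, MAX_SUMMARY_CHARS)
print(len(short) <= MAX_SUMMARY_CHARS)  # True: the cut lands at the last sentence boundary before 250 chars
print(short.endswith('.'))              # True: trailing whitespace after the period is stripped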

{graphiti_core-0.22.0rc2.dist-info → graphiti_core-0.22.0rc4.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: graphiti-core
-Version: 0.22.
+Version: 0.22.0rc4
 Summary: A temporal graph building library
 Project-URL: Homepage, https://help.getzep.com/graphiti/graphiti/overview
 Project-URL: Repository, https://github.com/getzep/graphiti
@@ -34,6 +34,7 @@ Requires-Dist: langchain-openai>=0.2.6; extra == 'dev'
 Requires-Dist: langgraph>=0.2.15; extra == 'dev'
 Requires-Dist: langsmith>=0.1.108; extra == 'dev'
 Requires-Dist: opensearch-py>=3.0.0; extra == 'dev'
+Requires-Dist: opentelemetry-sdk>=1.20.0; extra == 'dev'
 Requires-Dist: pyright>=1.1.404; extra == 'dev'
 Requires-Dist: pytest-asyncio>=0.24.0; extra == 'dev'
 Requires-Dist: pytest-xdist>=3.6.1; extra == 'dev'
@@ -59,6 +60,9 @@ Requires-Dist: langchain-aws>=0.2.29; extra == 'neptune'
 Requires-Dist: opensearch-py>=3.0.0; extra == 'neptune'
 Provides-Extra: sentence-transformers
 Requires-Dist: sentence-transformers>=3.2.1; extra == 'sentence-transformers'
+Provides-Extra: tracing
+Requires-Dist: opentelemetry-api>=1.20.0; extra == 'tracing'
+Requires-Dist: opentelemetry-sdk>=1.20.0; extra == 'tracing'
 Provides-Extra: voyageai
 Requires-Dist: voyageai>=0.2.3; extra == 'voyageai'
 Description-Content-Type: text/markdown

{graphiti_core-0.22.0rc2.dist-info → graphiti_core-0.22.0rc4.dist-info}/RECORD

@@ -2,11 +2,12 @@ graphiti_core/__init__.py,sha256=e5SWFkRiaUwfprYIeIgVIh7JDedNiloZvd3roU-0aDY,55
 graphiti_core/edges.py,sha256=2jA3x-9AGTldB52B5rWUhDtXXsj4PWM-MO1msIPsbdI,21048
 graphiti_core/errors.py,sha256=cH_v9TPgEPeQE6GFOHIg5TvejpUCBddGarMY2Whxbwc,2707
 graphiti_core/graph_queries.py,sha256=ZWMqAo5pwb8PO5ddg4zZ0ArhHWuWV42g3R9ULIxsHOs,8058
-graphiti_core/graphiti.py,sha256=
-graphiti_core/graphiti_types.py,sha256=
+graphiti_core/graphiti.py,sha256=5DLW6Z7f1OqfD5rHn-pAAIRvwae7meQ7pOFNBCJDhGM,46593
+graphiti_core/graphiti_types.py,sha256=_v-XsMgV-bBbi5P-PoRVyGJEdHEDJR-Khmv4cU0oZ-4,1094
 graphiti_core/helpers.py,sha256=q8kbL9gz8igdlh-oMUS-ylUyeMlXZb-ccf-HQkrES_0,5184
 graphiti_core/nodes.py,sha256=ox7uDYpaayc5J_mrbMaP-d-jACFx9R7Fb14tvh9aRI8,30426
 graphiti_core/py.typed,sha256=vlmmzQOt7bmeQl9L3XJP4W6Ry0iiELepnOrinKz5KQg,79
+graphiti_core/tracer.py,sha256=5L05H8PdJ1eqhmcHuYTtwMThVGVUdUzTdiFd_-07H4E,6149
 graphiti_core/cross_encoder/__init__.py,sha256=hry59vz21x-AtGZ0MJ7ugw0HTwJkXiddpp_Yqnwsen0,723
 graphiti_core/cross_encoder/bge_reranker_client.py,sha256=y3TfFxZh0Yvj6HUShmfUm6MC7OPXwWUlv1Qe5HF3S3I,1797
 graphiti_core/cross_encoder/client.py,sha256=KLsbfWKOEaAV3adFe3XZlAeb-gje9_sVKCVZTaJP3ac,1441
@@ -25,16 +26,16 @@ graphiti_core/embedder/gemini.py,sha256=s3_2xjHdFTIuF-fJlBFwh64XK5BLPHHThuBymDpM
 graphiti_core/embedder/openai.py,sha256=bIThUoLMeGlHG2-3VikzK6JZfOHKn4PKvUMx5sHxJy8,2192
 graphiti_core/embedder/voyage.py,sha256=oJHAZiNqjdEJOKgoKfGWcxK2-Ewqn5UB3vrBwIwP2u4,2546
 graphiti_core/llm_client/__init__.py,sha256=QgBWUiCeBp6YiA_xqyrDvJ9jIyy1hngH8g7FWahN3nw,776
-graphiti_core/llm_client/anthropic_client.py,sha256=
+graphiti_core/llm_client/anthropic_client.py,sha256=FeMX2LM8c1u4auN0a0nCb03mNz_fxA2M_o1Ci8KR_YU,13781
 graphiti_core/llm_client/azure_openai_client.py,sha256=ekERggAekbb7enes1RJqdRChf_mjaZTFXsnMbxO7azQ,2497
-graphiti_core/llm_client/client.py,sha256=
+graphiti_core/llm_client/client.py,sha256=o1R6TziVhsU55L5sjVeqUxWcKQSO6zvV5Q5hemZhD84,8680
 graphiti_core/llm_client/config.py,sha256=pivp29CDIbDPqgw5NF9Ok2AwcqTV5z5_Q1bgNs1CDGs,2560
 graphiti_core/llm_client/errors.py,sha256=pn6brRiLW60DAUIXJYKBT6MInrS4ueuH1hNLbn_JbQo,1243
-graphiti_core/llm_client/gemini_client.py,sha256=
+graphiti_core/llm_client/gemini_client.py,sha256=uSF3SXSJp1nSdWST2sG7_h6tCGDxfU5zCk6dBvPLH4U,18817
 graphiti_core/llm_client/groq_client.py,sha256=bYLE_cg1QEhugsJOXh4b1vPbxagKeMWqk48240GCzMs,2922
-graphiti_core/llm_client/openai_base_client.py,sha256=
+graphiti_core/llm_client/openai_base_client.py,sha256=qgdzCGC1tuUbSl13UFiOISmf7kMQSpOWCMwuaBHY9AQ,9412
 graphiti_core/llm_client/openai_client.py,sha256=AuaCFQFMJEGzBkFVouccq3XentmWRIKW0RLRBCUMm7Y,3763
-graphiti_core/llm_client/openai_generic_client.py,sha256=
+graphiti_core/llm_client/openai_generic_client.py,sha256=pefLN3WsjQcExTSfk_4nnvJu_wg2ZBKUljWN36EUnwM,7931
 graphiti_core/llm_client/utils.py,sha256=zKpxXEbKa369m4W7RDEf-m56kH46V1Mx3RowcWZEWWs,1000
 graphiti_core/migrations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 graphiti_core/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -48,7 +49,7 @@ graphiti_core/prompts/dedupe_nodes.py,sha256=YNNo19Cq8koLVoLCafpjYJOy5nmRZ-tEWhv
 graphiti_core/prompts/eval.py,sha256=GWFkfZoPfY8U7mV8Ngd_5a2S2fHS7KjajChntxv1UEY,5360
 graphiti_core/prompts/extract_edge_dates.py,sha256=3Drs3CmvP0gJN5BidWSxrNvLet3HPoTybU3BUIAoc0Y,4218
 graphiti_core/prompts/extract_edges.py,sha256=-yOIvCPwxIAXeqYpNCzouE6i3WfdsexzRXFmcXpQpAg,7113
-graphiti_core/prompts/extract_nodes.py,sha256=
+graphiti_core/prompts/extract_nodes.py,sha256=13aHEC26yUUcbR_xWgpvMSE8CT6HZK28AO8G0j2i8mU,11017
 graphiti_core/prompts/invalidate_edges.py,sha256=yfpcs_pyctnoM77ULPZXEtKW0oHr1MeLsJzC5yrE-o4,3547
 graphiti_core/prompts/lib.py,sha256=DCyHePM4_q-CptTpEXGO_dBv9k7xDtclEaB1dGu7EcI,4092
 graphiti_core/prompts/models.py,sha256=NgxdbPHJpBEcpbXovKyScgpBc73Q-GIW-CBDlBtDjto,894
@@ -67,16 +68,17 @@ graphiti_core/telemetry/telemetry.py,sha256=47LrzOVBCcZxsYPsnSxWFiztHoxYKKxPwyRX
 graphiti_core/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 graphiti_core/utils/bulk_utils.py,sha256=YpVs5olzrAWVd8pIQ8xi1Ql_IsPdbVSahV1JPuwmG4o,20308
 graphiti_core/utils/datetime_utils.py,sha256=J-zYSq7-H-2n9hYOXNIun12kM10vNX9mMATGR_egTmY,1806
+graphiti_core/utils/text_utils.py,sha256=Pth1vkrcHLfBnsPLmoc_F3BtOC73nrDwOIno4g_AF8M,1687
 graphiti_core/utils/maintenance/__init__.py,sha256=vW4H1KyapTl-OOz578uZABYcpND4wPx3Vt6aAPaXh78,301
-graphiti_core/utils/maintenance/community_operations.py,sha256=
+graphiti_core/utils/maintenance/community_operations.py,sha256=OzNo9DW47YWTy67aoq91wRgnKWVelOYduaJpIERdPFY,10803
 graphiti_core/utils/maintenance/dedup_helpers.py,sha256=B7k6KkB6Sii8PZCWNNTvsNiy4BNTNWpoLeGgrPLq6BE,9220
-graphiti_core/utils/maintenance/edge_operations.py,sha256=
+graphiti_core/utils/maintenance/edge_operations.py,sha256=obxycUWskKvetQesW5o0opwB7Hw0kssM4LbIcsy0SyI,26778
 graphiti_core/utils/maintenance/graph_data_operations.py,sha256=42icj3S_ELAJ-NK3jVS_rg_243dmnaZOyUitJj_uJ-M,6085
-graphiti_core/utils/maintenance/node_operations.py,sha256=
-graphiti_core/utils/maintenance/temporal_operations.py,sha256=
+graphiti_core/utils/maintenance/node_operations.py,sha256=70G-Kf1mQJ_9XTi9MJmq5dqC28VJHRxkoAwgMRx4Gvo,20143
+graphiti_core/utils/maintenance/temporal_operations.py,sha256=LWMw8D8-XOZkl412QKa5qOe9vsX_kOhis_dZlwSXY14,3539
 graphiti_core/utils/maintenance/utils.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 graphiti_core/utils/ontology_utils/entity_types_utils.py,sha256=4eVgxLWY6Q8k9cRJ5pW59IYF--U4nXZsZIGOVb_yHfQ,1285
-graphiti_core-0.22.
-graphiti_core-0.22.
-graphiti_core-0.22.
-graphiti_core-0.22.
+graphiti_core-0.22.0rc4.dist-info/METADATA,sha256=34Mn6GBus0O0cA8veamXE2NAdMeHBCg_8kLuPjMHM8c,27287
+graphiti_core-0.22.0rc4.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+graphiti_core-0.22.0rc4.dist-info/licenses/LICENSE,sha256=KCUwCyDXuVEgmDWkozHyniRyWjnWUWjkuDHfU6o3JlA,11325
+graphiti_core-0.22.0rc4.dist-info/RECORD,,

{graphiti_core-0.22.0rc2.dist-info → graphiti_core-0.22.0rc4.dist-info}/WHEEL: file without changes
{graphiti_core-0.22.0rc2.dist-info → graphiti_core-0.22.0rc4.dist-info}/licenses/LICENSE: file without changes