h-ai-brain 0.0.5__py3-none-any.whl → 0.0.7__py3-none-any.whl
This diff compares publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
- h_ai/application/hai_service.py +19 -9
- h_ai/application/priority_queue_service.py +34 -0
- h_ai/domain/priorityqueue/__init__.py +0 -0
- h_ai/domain/priorityqueue/priority_queue_repository.py +34 -0
- h_ai/domain/priorityqueue/queue_item.py +43 -0
- h_ai/domain/web_docs/ecosystem_link_scorer_service.py +83 -0
- h_ai/domain/web_docs/ecosystem_pattern_repository.py +182 -0
- h_ai/infrastructure/playwright/playwright_web_content_fetcher.py +7 -1
- h_ai/infrastructure/priorityqueue/__init__.py +0 -0
- h_ai/infrastructure/priorityqueue/in_memory_priority_queue_repository.py +87 -0
- {h_ai_brain-0.0.5.dist-info → h_ai_brain-0.0.7.dist-info}/METADATA +1 -1
- {h_ai_brain-0.0.5.dist-info → h_ai_brain-0.0.7.dist-info}/RECORD +16 -8
- {h_ai_brain-0.0.5.dist-info → h_ai_brain-0.0.7.dist-info}/WHEEL +1 -1
- {h_ai_brain-0.0.5.dist-info → h_ai_brain-0.0.7.dist-info}/licenses/LICENSE +0 -0
- {h_ai_brain-0.0.5.dist-info → h_ai_brain-0.0.7.dist-info}/licenses/NOTICE.txt +0 -0
- {h_ai_brain-0.0.5.dist-info → h_ai_brain-0.0.7.dist-info}/top_level.txt +0 -0
h_ai/application/hai_service.py
CHANGED
@@ -1,17 +1,22 @@
-import datetime
-
 from h_message_bus import NatsPublisherAdapter
-from h_message_bus.domain.twitter_get_user_request_message import TwitterGetUserRequestMessage
-from h_message_bus.domain.twitter_get_user_response_message import TwitterGetUserResponseMessage
-from h_message_bus.domain.
-
+from h_message_bus.domain.request_messages.twitter_get_user_request_message import TwitterGetUserRequestMessage
+from h_message_bus.domain.request_messages.twitter_get_user_response_message import TwitterGetUserResponseMessage
+from h_message_bus.domain.request_messages.vector_read_metadata_request_message import VectorReadMetaDataRequestMessage
+from h_message_bus.domain.request_messages.vector_read_metadata_response_message import \
+    VectorReadMetaDataResponseMessage
+from h_message_bus.domain.request_messages.vector_save_request_message import VectorSaveRequestMessage
+
+from .priority_queue_service import PriorityQueueService
 from ..application.web_docs_service import WebDocsService
+from ..infrastructure.priorityqueue.in_memory_priority_queue_repository import InMemoryPriorityQueueRepository
 
 
 class HaiService:
     def __init__(self, nats_publisher_adapter: NatsPublisherAdapter):
         self.nats_publisher_adapter = nats_publisher_adapter
         self.web_docs_service = WebDocsService()
+        queue = InMemoryPriorityQueueRepository()
+        self.queue_service = PriorityQueueService(queue)
 
     async def detect_and_store_documentation(self, twitter_screen_name: str):
         req_message = TwitterGetUserRequestMessage.create_message(twitter_screen_name)
@@ -31,8 +36,7 @@ class HaiService:
                 document_id = f"{doc.title}_{chapter.heading}_{i}"
 
                 req_metadata = {
-                    "source": doc.url
-                    "updated": datetime.datetime.utcnow().isoformat()
+                    "source": doc.url
                 }
                 i = i + 1
 
@@ -45,4 +49,10 @@ class HaiService:
             await self.nats_publisher_adapter.publish(request)
 
         else:
-            print(f"No documentation found for {twitter_user.screen_name}")
+            print(f"No documentation found for {twitter_user.screen_name}")
+
+    async def load_current_knowledge_base_metadata(self):
+        message = VectorReadMetaDataRequestMessage.create_message()
+        response = await self.nats_publisher_adapter.request(message)
+        metadata_result = VectorReadMetaDataResponseMessage.from_hai_message(response)
+        print(metadata_result)
h_ai/application/priority_queue_service.py
ADDED
@@ -0,0 +1,34 @@
+
+from typing import Any, List, Optional
+
+from ..domain.priorityqueue.priority_queue_repository import PriorityQueueRepository
+from ..domain.priorityqueue.queue_item import QueueItem
+
+
+class PriorityQueueService:
+    """Application service to manage priority queue operations"""
+
+    def __init__(self, repository: PriorityQueueRepository):
+        self.repository = repository
+
+    def add_item(self, queue_name: str, content: Any, priority: int, metadata: Optional[dict] = None) -> QueueItem:
+        """Add an item to the specified queue"""
+        item = QueueItem.create(content, priority, metadata)
+        self.repository.add_item(queue_name, item)
+        return item
+
+    def get_next_item(self, queue_name: str) -> Optional[QueueItem]:
+        """Get and remove the highest priority item from the queue"""
+        return self.repository.get_highest_priority_item(queue_name)
+
+    def get_items(self, queue_name: str, limit: int = 10) -> List[QueueItem]:
+        """Get multiple items from the queue in priority order without removing them"""
+        return self.repository.get_items(queue_name, limit)
+
+    def get_queue_length(self, queue_name: str) -> int:
+        """Get the number of items in the queue"""
+        return self.repository.queue_length(queue_name)
+
+    def get_available_queues(self) -> List[str]:
+        """Get a list of all available queue names"""
+        return self.repository.get_queue_names()
h_ai/domain/priorityqueue/__init__.py
File without changes
h_ai/domain/priorityqueue/priority_queue_repository.py
ADDED
@@ -0,0 +1,34 @@
+
+from abc import ABC, abstractmethod
+from typing import List, Optional
+
+from .queue_item import QueueItem
+
+
+class PriorityQueueRepository(ABC):
+    """Repository interface for priority queue operations"""
+
+    @abstractmethod
+    def add_item(self, queue_name: str, item: QueueItem) -> None:
+        """Add an item to the specified queue"""
+        pass
+
+    @abstractmethod
+    def get_highest_priority_item(self, queue_name: str) -> Optional[QueueItem]:
+        """Get and remove the highest priority item from the queue"""
+        pass
+
+    @abstractmethod
+    def get_items(self, queue_name: str, limit: int = 10) -> List[QueueItem]:
+        """Get multiple items from the queue in priority order without removing them"""
+        pass
+
+    @abstractmethod
+    def queue_length(self, queue_name: str) -> int:
+        """Get the number of items in the queue"""
+        pass
+
+    @abstractmethod
+    def get_queue_names(self) -> List[str]:
+        """Get a list of all available queue names"""
+        pass
h_ai/domain/priorityqueue/queue_item.py
ADDED
@@ -0,0 +1,43 @@
+
+import uuid
+from dataclasses import dataclass
+from datetime import datetime
+from typing import Optional
+
+from h_message_bus import HaiMessage
+
+
+@dataclass(frozen=True)
+class QueueItem:
+    """Value object representing an item in the priority queue"""
+    id: str
+    content: HaiMessage
+    priority: int
+    created_at: datetime
+    metadata: Optional[dict] = None
+
+    @classmethod
+    def create(cls, content: HaiMessage, priority: int, metadata: Optional[dict] = None) -> "QueueItem":
+        """Factory method to create a new QueueItem"""
+        return cls(
+            id=str(uuid.uuid4()),
+            content=content,
+            priority=priority,
+            created_at=datetime.utcnow(),
+            metadata=metadata
+        )
+
+    def __lt__(self, other):
+        """Comparison method for priority queue ordering
+        - Primary sort by priority: Higher number = higher priority
+        - Secondary sort by timestamp: Earlier timestamp = higher priority (FIFO)
+        """
+        if not isinstance(other, QueueItem):
+            return NotImplemented
+
+        # First, compare by priority (higher priority value comes first)
+        if self.priority != other.priority:
+            return self.priority > other.priority
+
+        # If priorities are equal, compare by timestamp (older timestamp comes first)
+        return self.created_at < other.created_at
@@ -0,0 +1,83 @@
|
|
1
|
+
import re
|
2
|
+
from urllib.parse import urlparse
|
3
|
+
|
4
|
+
from ...domain.web_docs.ecosystem_pattern_repository import EcosystemPatternRepository
|
5
|
+
|
6
|
+
|
7
|
+
class EcosystemLinkScorerService:
|
8
|
+
"""Service for scoring potential ecosystem-related links and content"""
|
9
|
+
|
10
|
+
def __init__(self, pattern_repo: EcosystemPatternRepository):
|
11
|
+
self.pattern_repo = pattern_repo
|
12
|
+
|
13
|
+
def score(self, full_url: str, link_text: str) -> float:
|
14
|
+
"""
|
15
|
+
Score a link based on how likely it is to be ecosystem-related
|
16
|
+
Returns a value between 0.0 and 1.0
|
17
|
+
"""
|
18
|
+
score = 0.0
|
19
|
+
max_score = 3.0 # Maximum possible score
|
20
|
+
|
21
|
+
# Parse the URL
|
22
|
+
parsed_url = urlparse(full_url)
|
23
|
+
domain = parsed_url.netloc
|
24
|
+
path = parsed_url.path
|
25
|
+
|
26
|
+
# Check domain patterns
|
27
|
+
for eco_domain in self.pattern_repo.ecosystem_domains:
|
28
|
+
if eco_domain in domain:
|
29
|
+
score += 1.0
|
30
|
+
break
|
31
|
+
|
32
|
+
# Check path patterns
|
33
|
+
for path_pattern in self.pattern_repo.ecosystem_path_patterns:
|
34
|
+
if re.search(path_pattern, path):
|
35
|
+
score += 1.0
|
36
|
+
break
|
37
|
+
|
38
|
+
# Check link text patterns
|
39
|
+
for text_pattern in self.pattern_repo.ecosystem_text_patterns:
|
40
|
+
if re.search(text_pattern, link_text):
|
41
|
+
score += 1.0
|
42
|
+
break
|
43
|
+
|
44
|
+
# Normalize score to 0.0-1.0 range
|
45
|
+
return min(score / max_score, 1.0)
|
46
|
+
|
47
|
+
def score_page(self, page_url: str, page_title: str, page_content: str) -> float:
|
48
|
+
"""
|
49
|
+
Score an entire page based on how likely it is to contain ecosystem information
|
50
|
+
Returns a value between 0.0 and 1.0
|
51
|
+
|
52
|
+
Args:
|
53
|
+
page_url: The URL of the page
|
54
|
+
page_title: The title of the page
|
55
|
+
page_content: The full text content of the page
|
56
|
+
"""
|
57
|
+
# Start with the URL and title scoring
|
58
|
+
url_score = self.score(page_url, page_title)
|
59
|
+
|
60
|
+
# Content-based scoring
|
61
|
+
content_score = 0.0
|
62
|
+
max_content_score = 2.0
|
63
|
+
|
64
|
+
# Check content patterns
|
65
|
+
content_matches = 0
|
66
|
+
for content_pattern in self.pattern_repo.ecosystem_content_patterns:
|
67
|
+
if re.search(content_pattern, page_content):
|
68
|
+
content_matches += 1
|
69
|
+
|
70
|
+
# Score based on number of content matches
|
71
|
+
if content_matches >= 3:
|
72
|
+
content_score += 1.0
|
73
|
+
elif content_matches > 0:
|
74
|
+
content_score += 0.5
|
75
|
+
|
76
|
+
# Check for header patterns
|
77
|
+
for header_pattern in self.pattern_repo.ecosystem_header_patterns:
|
78
|
+
if re.search(header_pattern, page_content):
|
79
|
+
content_score += 1.0
|
80
|
+
break
|
81
|
+
|
82
|
+
# Combined score with higher weight on content
|
83
|
+
return min((url_score + (content_score / max_content_score) * 2) / 3, 1.0)
|
@@ -0,0 +1,182 @@
|
|
1
|
+
class EcosystemPatternRepository:
|
2
|
+
"""Repository of patterns that identify ecosystem relationships, builders, and collaboration"""
|
3
|
+
|
4
|
+
def __init__(self):
|
5
|
+
# Domains commonly associated with ecosystem and project showcases
|
6
|
+
self.ecosystem_domains = [
|
7
|
+
"showcase.",
|
8
|
+
"ecosystem.",
|
9
|
+
"community.",
|
10
|
+
"gallery.",
|
11
|
+
"partners.",
|
12
|
+
"developers.",
|
13
|
+
"marketplace.",
|
14
|
+
"expo.",
|
15
|
+
"apps.",
|
16
|
+
"extensions.",
|
17
|
+
"plugins.",
|
18
|
+
]
|
19
|
+
|
20
|
+
# URL path patterns indicating ecosystem/builder content
|
21
|
+
self.ecosystem_path_patterns = [
|
22
|
+
r"/ecosystem/",
|
23
|
+
r"/showcase/",
|
24
|
+
r"/community/",
|
25
|
+
r"/built-with/",
|
26
|
+
r"/case-studies/",
|
27
|
+
r"/customers/",
|
28
|
+
r"/partners/",
|
29
|
+
r"/users/",
|
30
|
+
r"/success-stories/",
|
31
|
+
r"/integrations/",
|
32
|
+
r"/extensions/",
|
33
|
+
r"/marketplace/",
|
34
|
+
r"/plugins/",
|
35
|
+
r"/addons/",
|
36
|
+
r"/gallery/",
|
37
|
+
r"/examples/",
|
38
|
+
r"/projects/",
|
39
|
+
r"/contributors/",
|
40
|
+
r"/whos-using/",
|
41
|
+
]
|
42
|
+
|
43
|
+
# Link text patterns suggesting ecosystem content
|
44
|
+
self.ecosystem_text_patterns = [
|
45
|
+
r"(?i)ecosystem",
|
46
|
+
r"(?i)showcase",
|
47
|
+
r"(?i)built with",
|
48
|
+
r"(?i)powered by",
|
49
|
+
r"(?i)case stud(y|ies)",
|
50
|
+
r"(?i)success stor(y|ies)",
|
51
|
+
r"(?i)who('s| is) using",
|
52
|
+
r"(?i)our users",
|
53
|
+
r"(?i)our customers",
|
54
|
+
r"(?i)integrations?",
|
55
|
+
r"(?i)extensions?",
|
56
|
+
r"(?i)plugins?",
|
57
|
+
r"(?i)addons?",
|
58
|
+
r"(?i)community projects",
|
59
|
+
r"(?i)community contributions",
|
60
|
+
r"(?i)user contributions",
|
61
|
+
r"(?i)featured projects",
|
62
|
+
r"(?i)gallery",
|
63
|
+
]
|
64
|
+
|
65
|
+
# Header/title patterns suggesting ecosystem sections
|
66
|
+
self.ecosystem_header_patterns = [
|
67
|
+
r"(?i)ecosystem",
|
68
|
+
r"(?i)who('s| is) using",
|
69
|
+
r"(?i)built (on|with)",
|
70
|
+
r"(?i)powered by",
|
71
|
+
r"(?i)trusted by",
|
72
|
+
r"(?i)customer(s| success)",
|
73
|
+
r"(?i)case stud(y|ies)",
|
74
|
+
r"(?i)success stor(y|ies)",
|
75
|
+
r"(?i)showcase",
|
76
|
+
r"(?i)featured (users|customers|projects)",
|
77
|
+
r"(?i)community (projects|showcase)",
|
78
|
+
r"(?i)partner(s| program)",
|
79
|
+
r"(?i)(our|notable) users",
|
80
|
+
r"(?i)companies using",
|
81
|
+
r"(?i)in production",
|
82
|
+
r"(?i)contributor(s| showcase)",
|
83
|
+
r"(?i)extension (gallery|showcase)",
|
84
|
+
r"(?i)plugin (directory|marketplace)",
|
85
|
+
r"(?i)apps? (built|marketplace|gallery)",
|
86
|
+
]
|
87
|
+
|
88
|
+
# Content phrases that suggest ecosystem descriptions
|
89
|
+
self.ecosystem_content_patterns = [
|
90
|
+
r"(?i)built (on|with) (our|this)",
|
91
|
+
r"(?i)(companies|organizations|projects) (using|powered by)",
|
92
|
+
r"(?i)(is|are) using (our|this)",
|
93
|
+
r"(?i)powered by (our|this)",
|
94
|
+
r"(?i)extend(s|ing)? (the|our) (platform|ecosystem)",
|
95
|
+
r"(?i)integrated with",
|
96
|
+
r"(?i)build(s|ing)? (on top of|with)",
|
97
|
+
r"(?i)leverage(s|ing)? (our|this)",
|
98
|
+
r"(?i)extend(s|ing)? (the|our) (functionality|capabilities)",
|
99
|
+
r"(?i)based on (our|this)",
|
100
|
+
r"(?i)implemented (with|using)",
|
101
|
+
r"(?i)developed (with|using)",
|
102
|
+
r"(?i)(join|be part of) (our|the) ecosystem",
|
103
|
+
]
|
104
|
+
|
105
|
+
# Builder and contribution-specific patterns
|
106
|
+
self.builder_patterns = [
|
107
|
+
r"(?i)how to (build|contribute)",
|
108
|
+
r"(?i)build(ing)? (with|on)",
|
109
|
+
r"(?i)develop(ing)? (with|on)",
|
110
|
+
r"(?i)contribute to",
|
111
|
+
r"(?i)contributor guide",
|
112
|
+
r"(?i)developer program",
|
113
|
+
r"(?i)join (our|the) (ecosystem|community)",
|
114
|
+
r"(?i)become a (contributor|partner)",
|
115
|
+
r"(?i)extend (our|the) (platform|ecosystem)",
|
116
|
+
r"(?i)create (your own|an?) (plugin|extension|integration)",
|
117
|
+
r"(?i)developer (resources|portal)",
|
118
|
+
r"(?i)sdk",
|
119
|
+
r"(?i)api (access|integration)",
|
120
|
+
r"(?i)partner (program|portal)",
|
121
|
+
]
|
122
|
+
|
123
|
+
# Visual cues that often indicate ecosystem showcases
|
124
|
+
self.visual_indicators = [
|
125
|
+
r"logo grid",
|
126
|
+
r"logo carousel",
|
127
|
+
r"client logos",
|
128
|
+
r"partner logos",
|
129
|
+
r"customer logos",
|
130
|
+
r"company logos",
|
131
|
+
r"card gallery",
|
132
|
+
r"project cards",
|
133
|
+
r"showcase gallery",
|
134
|
+
r"case study cards",
|
135
|
+
r"testimonials",
|
136
|
+
r"user testimonials",
|
137
|
+
]
|
138
|
+
|
139
|
+
# Collaboration-specific patterns
|
140
|
+
self.collaboration_patterns = [
|
141
|
+
r"(?i)how to collaborate",
|
142
|
+
r"(?i)collaboration (guide|opportunities)",
|
143
|
+
r"(?i)working together",
|
144
|
+
r"(?i)partner(ship|ing) (opportunities|program)",
|
145
|
+
r"(?i)join (our|the) (community|ecosystem)",
|
146
|
+
r"(?i)community (contribution|participation)",
|
147
|
+
r"(?i)open (source|collaboration)",
|
148
|
+
r"(?i)contribute (code|documentation|resources)",
|
149
|
+
r"(?i)become a (partner|contributor|maintainer)",
|
150
|
+
r"(?i)collaboration (framework|model)",
|
151
|
+
r"(?i)(business|technical) partnership",
|
152
|
+
r"(?i)developer relations",
|
153
|
+
r"(?i)community (engagement|involvement)",
|
154
|
+
]
|
155
|
+
|
156
|
+
# Key meta tags that might indicate ecosystem content
|
157
|
+
self.meta_tag_patterns = [
|
158
|
+
r"(?i)ecosystem",
|
159
|
+
r"(?i)showcase",
|
160
|
+
r"(?i)community",
|
161
|
+
r"(?i)partner program",
|
162
|
+
r"(?i)integration",
|
163
|
+
r"(?i)extension",
|
164
|
+
r"(?i)plugin",
|
165
|
+
r"(?i)marketplace",
|
166
|
+
r"(?i)collaboration",
|
167
|
+
r"(?i)use cases",
|
168
|
+
r"(?i)case studies",
|
169
|
+
r"(?i)success stories",
|
170
|
+
]
|
171
|
+
|
172
|
+
# Schema.org types that often indicate ecosystem relationships
|
173
|
+
self.schema_types = [
|
174
|
+
"Product",
|
175
|
+
"SoftwareApplication",
|
176
|
+
"Organization",
|
177
|
+
"BusinessPartner",
|
178
|
+
"ProgramMembership",
|
179
|
+
"CreativeWork",
|
180
|
+
"SoftwareSourceCode",
|
181
|
+
"WebApplication",
|
182
|
+
]
|
h_ai/infrastructure/playwright/playwright_web_content_fetcher.py
CHANGED
@@ -19,7 +19,13 @@ class PlayWrightWebContentFetcher(WebFetcherRepository):
 
     async def fetch(self, url: str) -> Optional[str]:
         async with async_playwright() as p:
-            browser = await p.chromium.launch(headless=True)
+            browser = await p.chromium.launch(headless=True, args=[
+                '--disable-dev-shm-usage',  # Required for Docker
+                '--no-sandbox',  # Required for Docker non-root user
+                '--disable-setuid-sandbox',  # Required for Docker security
+                '--disable-gpu',  # Reduces resource usage
+            ])
+
             logger.debug(
                 f"Launching headless browser with user agent: {self.headers.get('User-Agent')}"
            )
h_ai/infrastructure/priorityqueue/__init__.py
File without changes
h_ai/infrastructure/priorityqueue/in_memory_priority_queue_repository.py
ADDED
@@ -0,0 +1,87 @@
+
+import threading
+from queue import PriorityQueue
+from typing import Dict, List, Optional
+
+from ...domain.priorityqueue.priority_queue_repository import PriorityQueueRepository
+from ...domain.priorityqueue.queue_item import QueueItem
+
+
+class InMemoryPriorityQueueRepository(PriorityQueueRepository):
+    """In-memory implementation of the PriorityQueueRepository using Python's PriorityQueue"""
+
+    def __init__(self):
+        # Dictionary mapping queue names to their PriorityQueue instances
+        self.queues: Dict[str, PriorityQueue] = {}
+        # Locks to ensure thread safety
+        self.locks: Dict[str, threading.Lock] = {}
+
+    def _get_or_create_queue(self, queue_name: str, maxsize: int = 10000) -> PriorityQueue:
+        """Get or create a queue with the given name"""
+        if queue_name not in self.queues:
+            self.queues[queue_name] = PriorityQueue(maxsize=maxsize)
+            self.locks[queue_name] = threading.Lock()
+        return self.queues[queue_name]
+
+    def _get_lock(self, queue_name: str) -> threading.Lock:
+        """Get the lock for the specified queue"""
+        if queue_name not in self.locks:
+            self.locks[queue_name] = threading.Lock()
+        return self.locks[queue_name]
+
+    def add_item(self, queue_name: str, item: QueueItem) -> None:
+        """Add an item to the specified queue"""
+        queue = self._get_or_create_queue(queue_name)
+        with self._get_lock(queue_name):
+            # The queue automatically orders by priority
+            queue.put(item)
+
+    def get_highest_priority_item(self, queue_name: str) -> Optional[QueueItem]:
+        """Get and remove the highest priority item from the queue"""
+        if queue_name not in self.queues:
+            return None
+
+        queue = self.queues[queue_name]
+        with self._get_lock(queue_name):
+            if queue.empty():
+                return None
+            return queue.get()
+
+    def get_items(self, queue_name: str, limit: int = 10) -> List[QueueItem]:
+        """Get multiple items from the queue in priority order without removing them"""
+        if queue_name not in self.queues:
+            return []
+
+        queue = self.queues[queue_name]
+        result = []
+
+        with self._get_lock(queue_name):
+            # Create a temporary list to hold items that we'll put back
+            temp_items = []
+
+            # Get up to 'limit' items
+            count = 0
+            while not queue.empty() and count < limit:
+                item = queue.get()
+                temp_items.append(item)
+                result.append(item)
+                count += 1
+
+            # Put all the items back in the same order
+            for item in temp_items:
+                queue.put(item)
+
+        return result
+
+    def queue_length(self, queue_name: str) -> int:
+        """Get the number of items in the queue"""
+        if queue_name not in self.queues:
+            return 0
+
+        queue = self.queues[queue_name]
+        with self._get_lock(queue_name):
+            return queue.qsize()
+
+    def get_queue_names(self) -> List[str]:
+        """Get a list of all available queue names"""
+        return list(self.queues.keys())
{h_ai_brain-0.0.5.dist-info → h_ai_brain-0.0.7.dist-info}/RECORD
CHANGED
@@ -1,10 +1,14 @@
 h_ai/__init__.py,sha256=bmHMDoui52Q73UvXdHslQ3w_LubmhRuKRlrjOYyCP8c,153
 h_ai/application/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-h_ai/application/hai_service.py,sha256=
+h_ai/application/hai_service.py,sha256=ZSbDLP8oCBliLpbhpKOf7Xv6cWDsLCSeDeurn207FsA,3005
+h_ai/application/priority_queue_service.py,sha256=qi1PZMLYxy3HzG6cu-2hLijUYGcYGYMpzAp4S0vl0I0,1462
 h_ai/application/web_docs_service.py,sha256=YiPBfPyjlloDq6CIOP0u7F1jNBK-elYRU8xl4qJ1oVc,1652
 h_ai/application/system_prompts/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 h_ai/application/system_prompts/roles/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 h_ai/domain/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+h_ai/domain/priorityqueue/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+h_ai/domain/priorityqueue/priority_queue_repository.py,sha256=XCjBrJezloblcqOPJUStTmU5tofx954h-mzxOsYIAbg,1042
+h_ai/domain/priorityqueue/queue_item.py,sha256=aP2Sd3ig9dgKnAsKE_rr3uRjJ_ClWIrvz0Y3nC8XbmE,1417
 h_ai/domain/reasoning/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 h_ai/domain/reasoning/llm_chat_repository.py,sha256=rY2izDyaDnoyyrCRS1qc9erHB98vARj4Mp-SnPwNhyY,211
 h_ai/domain/reasoning/llm_generate_respository.py,sha256=DPiV6ldCE8YhDdVb5rj98MBudKalDQHV3CZ2ADTm_f8,178
@@ -14,6 +18,8 @@ h_ai/domain/reasoning/tool_message.py,sha256=jpbfbJXj6oqZyB3lDxGOUyFB4faHtXAaEOV
 h_ai/domain/web_docs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 h_ai/domain/web_docs/doc_link_scorer_service.py,sha256=EmLSOaX7BCUQcKHZquaUt-Ps_DssZrRqpch5MgbUhAc,1444
 h_ai/domain/web_docs/documentation_pattern_repository.py,sha256=VhNzP3PUqgg9MaWhBVefj13XNxRBh6ZPUt-KH70ww2w,1302
+h_ai/domain/web_docs/ecosystem_link_scorer_service.py,sha256=Slin3ZAdJ3o3CxTvJtfD-vd4R4f-MINd3PY2G3bCCQg,2899
+h_ai/domain/web_docs/ecosystem_pattern_repository.py,sha256=uHBhEvz3HmhXRvFJ6BzJddZmngPSAQw-q39TgRLJiPg,6609
 h_ai/domain/web_docs/gitbook_web_fetcher_service.py,sha256=Ye-TcuwgW1fhIY8x6v9_-pmPN9pVFWzlOpwRt-4teaA,6490
 h_ai/domain/web_docs/web_docs_link_detector.py,sha256=NyMKFNs-41bqrxx6u-9GqIufy7pkDF_-_f1h8HECBK8,1192
 h_ai/domain/web_docs/web_link.py,sha256=J4KC3MmjkvWlAPDdEdjcqAZCvuSnJMahudCohiBk3wk,307
@@ -39,10 +45,12 @@ h_ai/infrastructure/llm/ollama/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCe
 h_ai/infrastructure/llm/ollama/models/ollama_chat_message.py,sha256=ZIz4PQ3869vI3xAYYufPrxXpacajRDtOI8RDl5Dm9RQ,305
 h_ai/infrastructure/llm/ollama/models/ollama_chat_session.py,sha256=GZ_ddpbWa8iy6NZq50vokUFVZBiX0WNa81z9-r9RzTY,392
 h_ai/infrastructure/playwright/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-h_ai/infrastructure/playwright/playwright_web_content_fetcher.py,sha256=
-
-
-h_ai_brain-0.0.
-h_ai_brain-0.0.
-h_ai_brain-0.0.
-h_ai_brain-0.0.
+h_ai/infrastructure/playwright/playwright_web_content_fetcher.py,sha256=FVwcK6hv_6aE4fYlJapLHyxNHsztQkKaulklHabyrEc,2684
+h_ai/infrastructure/priorityqueue/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+h_ai/infrastructure/priorityqueue/in_memory_priority_queue_repository.py,sha256=-DodmvFBUFnglJJmbUFEi3HpqxEdZvYrOpwZrWZrSU0,3262
+h_ai_brain-0.0.7.dist-info/licenses/LICENSE,sha256=SbvpEU5JIU3yzMMkyzrI0dGqHDoJR_lMKGdl6GZHsy4,11558
+h_ai_brain-0.0.7.dist-info/licenses/NOTICE.txt,sha256=vxeIKUiGqAePLvDW4AVm3Xh-3BcsvMtCMn1tbsr9zsE,668
+h_ai_brain-0.0.7.dist-info/METADATA,sha256=KxLN_FGexCNE_QbLb9lpyx_YWEM7-P8VO_Zoy--v1Yk,735
+h_ai_brain-0.0.7.dist-info/WHEEL,sha256=pxyMxgL8-pra_rKaQ4drOZAegBVuX-G_4nRHjjgWbmo,91
+h_ai_brain-0.0.7.dist-info/top_level.txt,sha256=3MChDBWvDJV4cEHuZhzeODxQ4ewtw-arOuyaDOc6sIo,5
+h_ai_brain-0.0.7.dist-info/RECORD,,
{h_ai_brain-0.0.5.dist-info → h_ai_brain-0.0.7.dist-info}/licenses/LICENSE
File without changes

{h_ai_brain-0.0.5.dist-info → h_ai_brain-0.0.7.dist-info}/licenses/NOTICE.txt
File without changes

{h_ai_brain-0.0.5.dist-info → h_ai_brain-0.0.7.dist-info}/top_level.txt
File without changes