microsoft-agents-hosting-core 0.4.0.dev6__py3-none-any.whl → 0.4.0.dev10__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- microsoft_agents/hosting/core/storage/__init__.py +20 -1
- microsoft_agents/hosting/core/storage/transcript_info.py +12 -0
- microsoft_agents/hosting/core/storage/transcript_logger.py +209 -0
- microsoft_agents/hosting/core/storage/transcript_memory_store.py +154 -0
- microsoft_agents/hosting/core/storage/transcript_store.py +52 -0
- {microsoft_agents_hosting_core-0.4.0.dev6.dist-info → microsoft_agents_hosting_core-0.4.0.dev10.dist-info}/METADATA +2 -2
- {microsoft_agents_hosting_core-0.4.0.dev6.dist-info → microsoft_agents_hosting_core-0.4.0.dev10.dist-info}/RECORD +9 -6
- microsoft_agents/hosting/core/storage/_storage_test_utils.py +0 -508
- {microsoft_agents_hosting_core-0.4.0.dev6.dist-info → microsoft_agents_hosting_core-0.4.0.dev10.dist-info}/WHEEL +0 -0
- {microsoft_agents_hosting_core-0.4.0.dev6.dist-info → microsoft_agents_hosting_core-0.4.0.dev10.dist-info}/top_level.txt +0 -0
|
@@ -1,5 +1,24 @@
|
|
|
1
1
|
from .store_item import StoreItem
|
|
2
2
|
from .storage import Storage, AsyncStorageBase
|
|
3
3
|
from .memory_storage import MemoryStorage
|
|
4
|
+
from .transcript_info import TranscriptInfo
|
|
5
|
+
from .transcript_logger import (
|
|
6
|
+
TranscriptLogger,
|
|
7
|
+
ConsoleTranscriptLogger,
|
|
8
|
+
TranscriptLoggerMiddleware,
|
|
9
|
+
FileTranscriptLogger,
|
|
10
|
+
)
|
|
11
|
+
from .transcript_store import TranscriptStore
|
|
4
12
|
|
|
5
|
-
__all__ = [
|
|
13
|
+
__all__ = [
|
|
14
|
+
"StoreItem",
|
|
15
|
+
"Storage",
|
|
16
|
+
"AsyncStorageBase",
|
|
17
|
+
"MemoryStorage",
|
|
18
|
+
"TranscriptInfo",
|
|
19
|
+
"TranscriptLogger",
|
|
20
|
+
"ConsoleTranscriptLogger",
|
|
21
|
+
"TranscriptLoggerMiddleware",
|
|
22
|
+
"TranscriptStore",
|
|
23
|
+
"FileTranscriptLogger",
|
|
24
|
+
]
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
# Copyright (c) Microsoft Corporation. All rights reserved.
|
|
2
|
+
# Licensed under the MIT License.
|
|
3
|
+
|
|
4
|
+
from datetime import datetime, timezone
|
|
5
|
+
from dataclasses import dataclass
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
@dataclass
|
|
9
|
+
class TranscriptInfo:
|
|
10
|
+
channel_id: str = ""
|
|
11
|
+
conversation_id: str = ""
|
|
12
|
+
created_on: datetime = datetime.min.replace(tzinfo=timezone.utc)
|
|
@@ -0,0 +1,209 @@
|
|
|
1
|
+
# Copyright (c) Microsoft Corporation. All rights reserved.
|
|
2
|
+
# Licensed under the MIT License.
|
|
3
|
+
import copy
|
|
4
|
+
import random
|
|
5
|
+
import string
|
|
6
|
+
import json
|
|
7
|
+
|
|
8
|
+
from abc import ABC, abstractmethod
|
|
9
|
+
from datetime import datetime, timezone
|
|
10
|
+
from queue import Queue
|
|
11
|
+
from typing import Awaitable, Callable, List, Optional
|
|
12
|
+
|
|
13
|
+
from microsoft_agents.activity import Activity, ChannelAccount
|
|
14
|
+
from microsoft_agents.activity.activity import ConversationReference
|
|
15
|
+
from microsoft_agents.activity.activity_types import ActivityTypes
|
|
16
|
+
from microsoft_agents.activity.conversation_reference import ActivityEventNames
|
|
17
|
+
from microsoft_agents.hosting.core.middleware_set import Middleware, TurnContext
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class TranscriptLogger(ABC):
|
|
21
|
+
@abstractmethod
|
|
22
|
+
async def log_activity(self, activity: Activity) -> None:
|
|
23
|
+
"""
|
|
24
|
+
Asynchronously logs an activity.
|
|
25
|
+
|
|
26
|
+
:param activity: The activity to log.
|
|
27
|
+
"""
|
|
28
|
+
pass
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
class ConsoleTranscriptLogger(TranscriptLogger):
|
|
32
|
+
"""
|
|
33
|
+
ConsoleTranscriptLogger writes activities to Console output. This is a DEBUG class, intended for testing
|
|
34
|
+
and log tailing
|
|
35
|
+
"""
|
|
36
|
+
|
|
37
|
+
async def log_activity(self, activity: Activity) -> None:
|
|
38
|
+
"""Log an activity to the transcript.
|
|
39
|
+
:param activity:Activity being logged.
|
|
40
|
+
"""
|
|
41
|
+
if not activity:
|
|
42
|
+
raise TypeError("Activity is required")
|
|
43
|
+
|
|
44
|
+
json_data = activity.model_dump_json()
|
|
45
|
+
parsed = json.loads(json_data)
|
|
46
|
+
print(json.dumps(parsed, indent=4))
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
class FileTranscriptLogger(TranscriptLogger):
|
|
50
|
+
"""
|
|
51
|
+
A TranscriptLogger implementation that appends each activity as JSON to a file. This class appends
|
|
52
|
+
each activity to the given file using basic formatting. This is a DEBUG class, intended for testing
|
|
53
|
+
and log tailing.
|
|
54
|
+
"""
|
|
55
|
+
|
|
56
|
+
def __init__(self, file_path: str, encoding: Optional[str] = "utf-8"):
|
|
57
|
+
"""
|
|
58
|
+
Initializes the FileTranscriptLogger and opens the file for appending.
|
|
59
|
+
|
|
60
|
+
:param file_path: Path to the transcript log file.
|
|
61
|
+
:param encoding: File encoding (default: utf-8).
|
|
62
|
+
"""
|
|
63
|
+
self.file_path = file_path
|
|
64
|
+
self.encoding = encoding
|
|
65
|
+
|
|
66
|
+
# Open file in append mode to ensure it exists
|
|
67
|
+
self._file = open(self.file_path, "a", encoding=self.encoding)
|
|
68
|
+
|
|
69
|
+
async def log_activity(self, activity: Activity) -> None:
|
|
70
|
+
"""
|
|
71
|
+
Appends the given activity as a JSON line to the file. This method pretty-prints the JSON for readability, which makes
|
|
72
|
+
it non-performant. For production scenarios, consider a more efficient logging mechanism.
|
|
73
|
+
|
|
74
|
+
:param activity: The Activity object to log.
|
|
75
|
+
"""
|
|
76
|
+
if not activity:
|
|
77
|
+
raise TypeError("Activity is required")
|
|
78
|
+
|
|
79
|
+
json_data = activity.model_dump_json()
|
|
80
|
+
parsed = json.loads(json_data)
|
|
81
|
+
|
|
82
|
+
self._file.write(json.dumps(parsed, indent=4))
|
|
83
|
+
|
|
84
|
+
# As this is a logging / debugging class, we want to ensure the data is written out immediately. This is another
|
|
85
|
+
# consideration that makes this class non-performant for production scenarios.
|
|
86
|
+
self._file.flush()
|
|
87
|
+
|
|
88
|
+
def __del__(self):
|
|
89
|
+
if hasattr(self, "_file"):
|
|
90
|
+
self._file.close()
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
class TranscriptLoggerMiddleware(Middleware):
|
|
94
|
+
"""Logs incoming and outgoing activities to a TranscriptLogger."""
|
|
95
|
+
|
|
96
|
+
def __init__(self, logger: TranscriptLogger):
|
|
97
|
+
if not logger:
|
|
98
|
+
raise TypeError(
|
|
99
|
+
"TranscriptLoggerMiddleware requires a TranscriptLogger instance."
|
|
100
|
+
)
|
|
101
|
+
|
|
102
|
+
self.logger = logger
|
|
103
|
+
|
|
104
|
+
async def on_turn(
|
|
105
|
+
self, context: TurnContext, logic: Callable[[TurnContext], Awaitable]
|
|
106
|
+
):
|
|
107
|
+
"""Initialization for middleware.
|
|
108
|
+
:param context: Context for the current turn of conversation with the user.
|
|
109
|
+
:param logic: Function to call at the end of the middleware chain.
|
|
110
|
+
"""
|
|
111
|
+
transcript = Queue()
|
|
112
|
+
activity = context.activity
|
|
113
|
+
# Log incoming activity at beginning of turn
|
|
114
|
+
if activity:
|
|
115
|
+
if not activity.from_property:
|
|
116
|
+
activity.from_property = ChannelAccount()
|
|
117
|
+
if not activity.from_property.role:
|
|
118
|
+
activity.from_property.role = "user"
|
|
119
|
+
|
|
120
|
+
# We should not log ContinueConversation events used by skills to initialize the middleware.
|
|
121
|
+
if not (
|
|
122
|
+
context.activity.type == ActivityTypes.event
|
|
123
|
+
and context.activity.name == ActivityEventNames.continue_conversation
|
|
124
|
+
):
|
|
125
|
+
await self._queue_activity(transcript, copy.copy(activity))
|
|
126
|
+
|
|
127
|
+
# hook up onSend pipeline
|
|
128
|
+
# pylint: disable=unused-argument
|
|
129
|
+
async def send_activities_handler(
|
|
130
|
+
ctx: TurnContext,
|
|
131
|
+
activities: List[Activity],
|
|
132
|
+
next_send: Callable[[], Awaitable[None]],
|
|
133
|
+
):
|
|
134
|
+
# Run full pipeline
|
|
135
|
+
responses = await next_send()
|
|
136
|
+
for index, activity in enumerate(activities):
|
|
137
|
+
cloned_activity = copy.copy(activity)
|
|
138
|
+
if responses and index < len(responses):
|
|
139
|
+
cloned_activity.id = responses[index].id
|
|
140
|
+
|
|
141
|
+
# For certain channels, a ResourceResponse with an id is not always sent to the bot.
|
|
142
|
+
# This fix uses the timestamp on the activity to populate its id for logging the transcript
|
|
143
|
+
# If there is no outgoing timestamp, the current time for the bot is used for the activity.id
|
|
144
|
+
if not cloned_activity.id:
|
|
145
|
+
alphanumeric = string.ascii_lowercase + string.digits
|
|
146
|
+
prefix = "g_" + "".join(
|
|
147
|
+
random.choice(alphanumeric) for i in range(5)
|
|
148
|
+
)
|
|
149
|
+
epoch = datetime.fromtimestamp(0, timezone.utc)
|
|
150
|
+
if cloned_activity.timestamp:
|
|
151
|
+
reference = cloned_activity.timestamp
|
|
152
|
+
else:
|
|
153
|
+
reference = datetime.now(timezone.utc)
|
|
154
|
+
delta = (reference - epoch).total_seconds() * 1000
|
|
155
|
+
cloned_activity.id = f"{prefix}{delta}"
|
|
156
|
+
await self._queue_activity(transcript, cloned_activity)
|
|
157
|
+
return responses
|
|
158
|
+
|
|
159
|
+
context.on_send_activities(send_activities_handler)
|
|
160
|
+
|
|
161
|
+
# hook up update activity pipeline
|
|
162
|
+
async def update_activity_handler(
|
|
163
|
+
ctx: TurnContext, activity: Activity, next_update: Callable[[], Awaitable]
|
|
164
|
+
):
|
|
165
|
+
# Run full pipeline
|
|
166
|
+
response = await next_update()
|
|
167
|
+
update_activity = copy.copy(activity)
|
|
168
|
+
update_activity.type = ActivityTypes.message_update
|
|
169
|
+
await self._queue_activity(transcript, update_activity)
|
|
170
|
+
return response
|
|
171
|
+
|
|
172
|
+
context.on_update_activity(update_activity_handler)
|
|
173
|
+
|
|
174
|
+
# hook up delete activity pipeline
|
|
175
|
+
async def delete_activity_handler(
|
|
176
|
+
ctx: TurnContext,
|
|
177
|
+
reference: ConversationReference,
|
|
178
|
+
next_delete: Callable[[], Awaitable],
|
|
179
|
+
):
|
|
180
|
+
# Run full pipeline
|
|
181
|
+
await next_delete()
|
|
182
|
+
|
|
183
|
+
delete_msg = Activity(
|
|
184
|
+
type=ActivityTypes.message_delete, id=reference.activity_id
|
|
185
|
+
)
|
|
186
|
+
deleted_activity: Activity = TurnContext.apply_conversation_reference(
|
|
187
|
+
delete_msg, reference, False
|
|
188
|
+
)
|
|
189
|
+
await self._queue_activity(transcript, deleted_activity)
|
|
190
|
+
|
|
191
|
+
context.on_delete_activity(delete_activity_handler)
|
|
192
|
+
|
|
193
|
+
if logic:
|
|
194
|
+
await logic()
|
|
195
|
+
|
|
196
|
+
# Flush transcript at end of turn
|
|
197
|
+
while not transcript.empty():
|
|
198
|
+
activity = transcript.get()
|
|
199
|
+
if activity is None:
|
|
200
|
+
break
|
|
201
|
+
await self.logger.log_activity(activity)
|
|
202
|
+
transcript.task_done()
|
|
203
|
+
|
|
204
|
+
async def _queue_activity(self, transcript: Queue, activity: Activity) -> None:
|
|
205
|
+
"""Logs the activity.
|
|
206
|
+
:param transcript: transcript.
|
|
207
|
+
:param activity: Activity to log.
|
|
208
|
+
"""
|
|
209
|
+
transcript.put(activity)
|
|
@@ -0,0 +1,154 @@
|
|
|
1
|
+
# Copyright (c) Microsoft Corporation. All rights reserved.
|
|
2
|
+
# Licensed under the MIT License.
|
|
3
|
+
|
|
4
|
+
from threading import Lock
|
|
5
|
+
from datetime import datetime, timezone
|
|
6
|
+
from typing import List
|
|
7
|
+
from .transcript_logger import TranscriptLogger
|
|
8
|
+
from .transcript_info import TranscriptInfo
|
|
9
|
+
from microsoft_agents.activity import Activity
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class TranscriptMemoryStore(TranscriptLogger):
|
|
13
|
+
"""
|
|
14
|
+
An in-memory implementation of the TranscriptLogger for storing and retrieving activities.
|
|
15
|
+
|
|
16
|
+
This class is thread-safe and stores all activities in a list. It supports logging activities,
|
|
17
|
+
retrieving activities for a specific channel and conversation, and filtering by timestamp.
|
|
18
|
+
Activities with a None timestamp are treated as the earliest possible datetime.
|
|
19
|
+
|
|
20
|
+
Note: This class is intended for testing and prototyping purposes only. It does not persist
|
|
21
|
+
data and is not suitable for production use. This store will also grow without bound over
|
|
22
|
+
time, making it especially unsuited for production use.
|
|
23
|
+
"""
|
|
24
|
+
|
|
25
|
+
def __init__(self):
|
|
26
|
+
"""
|
|
27
|
+
Initializes the TranscriptMemoryStore.
|
|
28
|
+
"""
|
|
29
|
+
self._transcript = []
|
|
30
|
+
self.lock = Lock()
|
|
31
|
+
|
|
32
|
+
async def log_activity(self, activity: Activity) -> None:
|
|
33
|
+
"""
|
|
34
|
+
Asynchronously logs an activity to the in-memory transcript.
|
|
35
|
+
|
|
36
|
+
:param activity: The Activity object to log. Must have a valid conversation and conversation id.
|
|
37
|
+
:raises ValueError: If activity, activity.conversation, or activity.conversation.id is None.
|
|
38
|
+
"""
|
|
39
|
+
if not activity:
|
|
40
|
+
raise ValueError("Activity cannot be None")
|
|
41
|
+
if not activity.conversation:
|
|
42
|
+
raise ValueError("Activity.Conversation cannot be None")
|
|
43
|
+
if not activity.conversation.id:
|
|
44
|
+
raise ValueError("Activity.Conversation.id cannot be None")
|
|
45
|
+
|
|
46
|
+
with self.lock:
|
|
47
|
+
self._transcript.append(activity)
|
|
48
|
+
|
|
49
|
+
async def get_transcript_activities(
|
|
50
|
+
self,
|
|
51
|
+
channel_id: str,
|
|
52
|
+
conversation_id: str,
|
|
53
|
+
continuation_token: str = None,
|
|
54
|
+
start_date: datetime = datetime.min.replace(tzinfo=timezone.utc),
|
|
55
|
+
) -> tuple[list[Activity], str]:
|
|
56
|
+
"""
|
|
57
|
+
Retrieves activities for a given channel and conversation, optionally filtered by start_date.
|
|
58
|
+
|
|
59
|
+
:param channel_id: The channel ID to filter activities.
|
|
60
|
+
:param conversation_id: The conversation ID to filter activities.
|
|
61
|
+
:param continuation_token: (Unused) Token for pagination.
|
|
62
|
+
:param start_date: Only activities with timestamp >= start_date are returned. None timestamps are treated as datetime.min.
|
|
63
|
+
:return: A tuple containing the filtered list of Activity objects and a continuation token (always None).
|
|
64
|
+
:raises ValueError: If channel_id or conversation_id is None.
|
|
65
|
+
"""
|
|
66
|
+
if not channel_id:
|
|
67
|
+
raise ValueError("channel_id cannot be None")
|
|
68
|
+
if not conversation_id:
|
|
69
|
+
raise ValueError("conversation_id cannot be None")
|
|
70
|
+
|
|
71
|
+
with self.lock:
|
|
72
|
+
# Get the activities that match on channel and conversation id
|
|
73
|
+
relevant_activities = [
|
|
74
|
+
a
|
|
75
|
+
for a in self._transcript
|
|
76
|
+
if a.channel_id == channel_id
|
|
77
|
+
and a.conversation
|
|
78
|
+
and a.conversation.id == conversation_id
|
|
79
|
+
]
|
|
80
|
+
# sort these by timestamp, treating None as datetime.min
|
|
81
|
+
sorted_relevant_activities = sorted(
|
|
82
|
+
relevant_activities,
|
|
83
|
+
key=lambda a: (
|
|
84
|
+
a.timestamp
|
|
85
|
+
if a.timestamp is not None
|
|
86
|
+
else datetime.min.replace(tzinfo=timezone.utc)
|
|
87
|
+
),
|
|
88
|
+
)
|
|
89
|
+
# grab the ones bigger than the requested start date, treating None as datetime.min
|
|
90
|
+
filtered_sorted_activities = [
|
|
91
|
+
a
|
|
92
|
+
for a in sorted_relevant_activities
|
|
93
|
+
if (
|
|
94
|
+
a.timestamp
|
|
95
|
+
if a.timestamp is not None
|
|
96
|
+
else datetime.min.replace(tzinfo=timezone.utc)
|
|
97
|
+
)
|
|
98
|
+
>= start_date
|
|
99
|
+
]
|
|
100
|
+
|
|
101
|
+
return filtered_sorted_activities, None
|
|
102
|
+
|
|
103
|
+
async def delete_transcript(self, channel_id: str, conversation_id: str) -> None:
|
|
104
|
+
"""
|
|
105
|
+
Deletes all activities for a given channel and conversation from the in-memory transcript.
|
|
106
|
+
|
|
107
|
+
:param channel_id: The channel ID whose transcript should be deleted.
|
|
108
|
+
:param conversation_id: The conversation ID whose transcript should be deleted.
|
|
109
|
+
:raises ValueError: If channel_id or conversation_id is None.
|
|
110
|
+
"""
|
|
111
|
+
if not channel_id:
|
|
112
|
+
raise ValueError("channel_id cannot be None")
|
|
113
|
+
if not conversation_id:
|
|
114
|
+
raise ValueError("conversation_id cannot be None")
|
|
115
|
+
|
|
116
|
+
with self.lock:
|
|
117
|
+
self._transcript = [
|
|
118
|
+
a
|
|
119
|
+
for a in self._transcript
|
|
120
|
+
if not (
|
|
121
|
+
a.channel_id == channel_id
|
|
122
|
+
and a.conversation
|
|
123
|
+
and a.conversation.id == conversation_id
|
|
124
|
+
)
|
|
125
|
+
]
|
|
126
|
+
|
|
127
|
+
async def list_transcripts(
|
|
128
|
+
self, channel_id: str, continuation_token: str = None
|
|
129
|
+
) -> tuple[list[TranscriptInfo], str]:
|
|
130
|
+
"""
|
|
131
|
+
Lists all transcripts (unique conversation IDs) for a given channel.
|
|
132
|
+
|
|
133
|
+
:param channel_id: The channel ID to list transcripts for.
|
|
134
|
+
:param continuation_token: (Unused) Token for pagination.
|
|
135
|
+
:return: A tuple containing a list of TranscriptInfo objects and a continuation token (always None).
|
|
136
|
+
:raises ValueError: If channel_id is None.
|
|
137
|
+
"""
|
|
138
|
+
if not channel_id:
|
|
139
|
+
raise ValueError("channel_id cannot be None")
|
|
140
|
+
|
|
141
|
+
with self.lock:
|
|
142
|
+
relevant_activities = [
|
|
143
|
+
a for a in self._transcript if a.channel_id == channel_id
|
|
144
|
+
]
|
|
145
|
+
conversations = set(
|
|
146
|
+
a.conversation.id
|
|
147
|
+
for a in relevant_activities
|
|
148
|
+
if a.conversation and a.conversation.id
|
|
149
|
+
)
|
|
150
|
+
transcript_infos = [
|
|
151
|
+
TranscriptInfo(channel_id=channel_id, conversation_id=conversation_id)
|
|
152
|
+
for conversation_id in conversations
|
|
153
|
+
]
|
|
154
|
+
return transcript_infos, None
|
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
# Copyright (c) Microsoft Corporation. All rights reserved.
|
|
2
|
+
# Licensed under the MIT License.
|
|
3
|
+
|
|
4
|
+
from abc import ABC, abstractmethod
|
|
5
|
+
from datetime import datetime, timezone
|
|
6
|
+
from microsoft_agents.activity import Activity
|
|
7
|
+
from .transcript_info import TranscriptInfo
|
|
8
|
+
from .transcript_logger import TranscriptLogger
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class TranscriptStore(TranscriptLogger):
|
|
12
|
+
@abstractmethod
|
|
13
|
+
async def get_transcript_activities(
|
|
14
|
+
self,
|
|
15
|
+
channel_id: str,
|
|
16
|
+
conversation_id: str,
|
|
17
|
+
continuation_token: str = None,
|
|
18
|
+
start_date: datetime = datetime.min.replace(tzinfo=timezone.utc),
|
|
19
|
+
) -> tuple[list[Activity], str]:
|
|
20
|
+
"""
|
|
21
|
+
Asynchronously retrieves activities from a transcript.
|
|
22
|
+
|
|
23
|
+
:param channel_id: The channel ID of the conversation.
|
|
24
|
+
:param conversation_id: The conversation ID.
|
|
25
|
+
:param continuation_token: (Optional) A token to continue retrieving activities from a specific point.
|
|
26
|
+
:param start_date: (Optional) The start date to filter activities.
|
|
27
|
+
:return: A tuple containing a list of activities and a continuation token.
|
|
28
|
+
"""
|
|
29
|
+
pass
|
|
30
|
+
|
|
31
|
+
@abstractmethod
|
|
32
|
+
async def list_transcripts(
|
|
33
|
+
self, channel_id: str, continuation_token: str = None
|
|
34
|
+
) -> tuple[list[TranscriptInfo, str]]:
|
|
35
|
+
"""
|
|
36
|
+
Asynchronously lists transcripts for a given channel.
|
|
37
|
+
|
|
38
|
+
:param channel_id: The channel ID to list transcripts for.
|
|
39
|
+
:param continuation_token: (Optional) A token to continue listing transcripts from a specific point.
|
|
40
|
+
:return: A tuple containing a list of transcripts and a continuation token.
|
|
41
|
+
"""
|
|
42
|
+
pass
|
|
43
|
+
|
|
44
|
+
@abstractmethod
|
|
45
|
+
async def delete_transcript(self, channel_id: str, conversation_id: str) -> None:
|
|
46
|
+
"""
|
|
47
|
+
Asynchronously deletes a transcript.
|
|
48
|
+
|
|
49
|
+
:param channel_id: The channel ID of the conversation.
|
|
50
|
+
:param conversation_id: The conversation ID.
|
|
51
|
+
"""
|
|
52
|
+
pass
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: microsoft-agents-hosting-core
|
|
3
|
-
Version: 0.4.0.
|
|
3
|
+
Version: 0.4.0.dev10
|
|
4
4
|
Summary: Core library for Microsoft Agents
|
|
5
5
|
Author: Microsoft Corporation
|
|
6
6
|
Project-URL: Homepage, https://github.com/microsoft/Agents
|
|
@@ -8,7 +8,7 @@ Classifier: Programming Language :: Python :: 3
|
|
|
8
8
|
Classifier: License :: OSI Approved :: MIT License
|
|
9
9
|
Classifier: Operating System :: OS Independent
|
|
10
10
|
Requires-Python: >=3.9
|
|
11
|
-
Requires-Dist: microsoft-agents-activity==0.4.0.
|
|
11
|
+
Requires-Dist: microsoft-agents-activity==0.4.0.dev10
|
|
12
12
|
Requires-Dist: pyjwt>=2.10.1
|
|
13
13
|
Requires-Dist: isodate>=0.6.1
|
|
14
14
|
Requires-Dist: azure-core>=1.30.0
|
|
@@ -70,14 +70,17 @@ microsoft_agents/hosting/core/state/__init__.py,sha256=yckKi1wg_86ng-DL9Q3R49QiW
|
|
|
70
70
|
microsoft_agents/hosting/core/state/agent_state.py,sha256=-6kHxVCl6oUT-XjDXkPOPw0fD227Uy3zNzqMInwFeJc,12891
|
|
71
71
|
microsoft_agents/hosting/core/state/state_property_accessor.py,sha256=kpiNnzkZ6el-oRITRbRkk1Faa_CPFxpJQdvSGxIJP70,1392
|
|
72
72
|
microsoft_agents/hosting/core/state/user_state.py,sha256=zEigX-sroNAyoQAxQjG1OgmJQKjk1zOkdeqylFg7M2E,1484
|
|
73
|
-
microsoft_agents/hosting/core/storage/__init__.py,sha256=
|
|
74
|
-
microsoft_agents/hosting/core/storage/_storage_test_utils.py,sha256=QwavT0mjdknKHr_70HUZflksA4OVsfxBbS_JoVWIoDc,18315
|
|
73
|
+
microsoft_agents/hosting/core/storage/__init__.py,sha256=X2jWX0qc2YdFNhwOct5Q7VJ9Q5VnOpKB0RqC7HG2_Qs,611
|
|
75
74
|
microsoft_agents/hosting/core/storage/_type_aliases.py,sha256=VCKtjiCBrhEsGSm3zVVSSccdoiY02GYhABvrLjhAcz8,72
|
|
76
75
|
microsoft_agents/hosting/core/storage/error_handling.py,sha256=zH34d7s4pJG_uajpBWhrtTpH2eMy88kSKaqvOqtbgzY,1265
|
|
77
76
|
microsoft_agents/hosting/core/storage/memory_storage.py,sha256=NADem1wQE1MOG1qMriYw4NjILHEBDbIG5HT6wvHfG2M,2353
|
|
78
77
|
microsoft_agents/hosting/core/storage/storage.py,sha256=vft_Kw4pkzo8NnBEyDx7gAn1Ndg2I9ePaxnuxbKVHzs,3227
|
|
79
78
|
microsoft_agents/hosting/core/storage/store_item.py,sha256=4LSkuI0H0lgWig88YoHFn6BP8Bx44YbyuvqBvaBNdEM,276
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
79
|
+
microsoft_agents/hosting/core/storage/transcript_info.py,sha256=5VN32j99tshChAffvuZ6D3GH3ABCZsQGHC_bYDAwFOk,328
|
|
80
|
+
microsoft_agents/hosting/core/storage/transcript_logger.py,sha256=PxAZTZVFRC2q_i1JABo-JFOPEv9QyXCMe5wZdiiq6_g,8007
|
|
81
|
+
microsoft_agents/hosting/core/storage/transcript_memory_store.py,sha256=xdX4CtyDczN5jCoi-ZtXzBZq52ahsk3IQ_Y6qkmub8U,6323
|
|
82
|
+
microsoft_agents/hosting/core/storage/transcript_store.py,sha256=ka74o0WvI5GhMZcFqSxVdamBhGzZcDZe6VNkG-sMy74,1944
|
|
83
|
+
microsoft_agents_hosting_core-0.4.0.dev10.dist-info/METADATA,sha256=jLXmnVD1NnbBccGs57HhKye8o-c_d2ikm-hYHbjCckg,586
|
|
84
|
+
microsoft_agents_hosting_core-0.4.0.dev10.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
|
|
85
|
+
microsoft_agents_hosting_core-0.4.0.dev10.dist-info/top_level.txt,sha256=lWKcT4v6fTA_NgsuHdNvuMjSrkiBMXohn64ApY7Xi8A,17
|
|
86
|
+
microsoft_agents_hosting_core-0.4.0.dev10.dist-info/RECORD,,
|
|
@@ -1,508 +0,0 @@
|
|
|
1
|
-
import pytest
|
|
2
|
-
import gc
|
|
3
|
-
from copy import deepcopy
|
|
4
|
-
from abc import ABC
|
|
5
|
-
from typing import Any
|
|
6
|
-
|
|
7
|
-
from .storage import Storage
|
|
8
|
-
from .store_item import StoreItem
|
|
9
|
-
from ._type_aliases import JSON
|
|
10
|
-
from .memory_storage import MemoryStorage
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
class MockStoreItem(StoreItem):
|
|
14
|
-
"""Test implementation of StoreItem for testing purposes"""
|
|
15
|
-
|
|
16
|
-
def __init__(self, data: dict[str, Any] = None):
|
|
17
|
-
self.data = data or {}
|
|
18
|
-
|
|
19
|
-
def store_item_to_json(self) -> JSON:
|
|
20
|
-
return self.data
|
|
21
|
-
|
|
22
|
-
@staticmethod
|
|
23
|
-
def from_json_to_store_item(json_data: JSON) -> "MockStoreItem":
|
|
24
|
-
return MockStoreItem(json_data)
|
|
25
|
-
|
|
26
|
-
def __eq__(self, other):
|
|
27
|
-
if not isinstance(other, MockStoreItem):
|
|
28
|
-
return False
|
|
29
|
-
return self.data == other.data
|
|
30
|
-
|
|
31
|
-
def __repr__(self):
|
|
32
|
-
return f"MockStoreItem({self.data})"
|
|
33
|
-
|
|
34
|
-
def deepcopy(self):
|
|
35
|
-
return MockStoreItem(my_deepcopy(self.data))
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
class MockStoreItemB(MockStoreItem):
|
|
39
|
-
"""Another test implementation of StoreItem for testing purposes"""
|
|
40
|
-
|
|
41
|
-
def __init__(self, data: dict[str, Any] = None, other_field: bool = True):
|
|
42
|
-
super().__init__(data or {})
|
|
43
|
-
self.other_field = other_field
|
|
44
|
-
|
|
45
|
-
def store_item_to_json(self) -> JSON:
|
|
46
|
-
return [self.data, self.other_field]
|
|
47
|
-
|
|
48
|
-
@staticmethod
|
|
49
|
-
def from_json_to_store_item(json_data: JSON) -> "MockStoreItem":
|
|
50
|
-
return MockStoreItemB(json_data[0], json_data[1])
|
|
51
|
-
|
|
52
|
-
def __eq__(self, other):
|
|
53
|
-
if not isinstance(other, MockStoreItemB):
|
|
54
|
-
return False
|
|
55
|
-
return self.data == other.data and self.other_field == other.other_field
|
|
56
|
-
|
|
57
|
-
def deepcopy(self):
|
|
58
|
-
return MockStoreItemB(my_deepcopy(self.data), self.other_field)
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
def my_deepcopy(original):
|
|
62
|
-
"""Deep copy an object, including StoreItem instances."""
|
|
63
|
-
|
|
64
|
-
iter_obj = None
|
|
65
|
-
if isinstance(original, list):
|
|
66
|
-
iter_obj = enumerate(original)
|
|
67
|
-
elif isinstance(original, dict):
|
|
68
|
-
iter_obj = original.items()
|
|
69
|
-
elif isinstance(original, MockStoreItem):
|
|
70
|
-
return original.deepcopy()
|
|
71
|
-
else:
|
|
72
|
-
return deepcopy(original)
|
|
73
|
-
|
|
74
|
-
obj = {} if isinstance(original, dict) else ([None] * len(original))
|
|
75
|
-
for key, value in iter_obj:
|
|
76
|
-
obj[key] = my_deepcopy(value)
|
|
77
|
-
return obj
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
def subsets(lst, n=-1):
|
|
81
|
-
"""Generate all subsets of a list up to length n. If n is -1, all subsets are generated.
|
|
82
|
-
|
|
83
|
-
Only contiguous subsets are generated.
|
|
84
|
-
"""
|
|
85
|
-
if n < 0:
|
|
86
|
-
n = len(lst)
|
|
87
|
-
subsets = []
|
|
88
|
-
for i in range(len(lst) + 1):
|
|
89
|
-
for j in range(0, i):
|
|
90
|
-
if 1 <= i - j <= n:
|
|
91
|
-
subsets.append(lst[j:i])
|
|
92
|
-
return subsets
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
# bootstrapping class to compare against
|
|
96
|
-
# if this class is correct, then the tests are correct
|
|
97
|
-
class StorageBaseline(Storage):
|
|
98
|
-
""" "A simple in-memory storage implementation for testing purposes."""
|
|
99
|
-
|
|
100
|
-
def __init__(self, initial_data: dict = None):
|
|
101
|
-
self._memory = deepcopy(initial_data) or {}
|
|
102
|
-
self._key_history = set(initial_data.keys()) if initial_data else set()
|
|
103
|
-
|
|
104
|
-
def read(self, keys: list[str]) -> dict[str, Any]:
|
|
105
|
-
self._key_history.update(keys)
|
|
106
|
-
return {key: self._memory.get(key) for key in keys if key in self._memory}
|
|
107
|
-
|
|
108
|
-
def write(self, changes: dict[str, Any]) -> None:
|
|
109
|
-
self._key_history.update(changes.keys())
|
|
110
|
-
self._memory.update(changes)
|
|
111
|
-
|
|
112
|
-
def delete(self, keys: list[str]) -> None:
|
|
113
|
-
self._key_history.update(keys)
|
|
114
|
-
for key in keys:
|
|
115
|
-
if key in self._memory:
|
|
116
|
-
del self._memory[key]
|
|
117
|
-
|
|
118
|
-
async def equals(self, other) -> bool:
|
|
119
|
-
"""
|
|
120
|
-
Compare the items for all keys seen by this mock instance.
|
|
121
|
-
|
|
122
|
-
Note:
|
|
123
|
-
This is an extra safety measure, and I've made the
|
|
124
|
-
executive decision to not test this method itself
|
|
125
|
-
because passing tests with calls to this method
|
|
126
|
-
is also dependent on the correctness of other
|
|
127
|
-
aspects, based on the other assertions in the tests.
|
|
128
|
-
"""
|
|
129
|
-
for key in self._key_history:
|
|
130
|
-
if key not in self._memory:
|
|
131
|
-
if len(await other.read([key], target_cls=MockStoreItem)) > 0:
|
|
132
|
-
breakpoint()
|
|
133
|
-
return False # key should not exist in other
|
|
134
|
-
continue
|
|
135
|
-
|
|
136
|
-
# key exists in baseline instance, so let's see if the values match
|
|
137
|
-
item = self._memory.get(key, None)
|
|
138
|
-
target_cls = type(item)
|
|
139
|
-
res = await other.read([key], target_cls=target_cls)
|
|
140
|
-
|
|
141
|
-
if key not in res or item != res[key]:
|
|
142
|
-
breakpoint()
|
|
143
|
-
return False
|
|
144
|
-
return True
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
class StorageTestsCommon(ABC):
|
|
148
|
-
"""Common fixtures for Storage implementations."""
|
|
149
|
-
|
|
150
|
-
KEY_LIST = [
|
|
151
|
-
"f",
|
|
152
|
-
"a!0dslfj",
|
|
153
|
-
"\\?/#\t\n\r*",
|
|
154
|
-
"527",
|
|
155
|
-
"test.txt",
|
|
156
|
-
"_-__--",
|
|
157
|
-
"VAR",
|
|
158
|
-
"None",
|
|
159
|
-
"multi word key",
|
|
160
|
-
]
|
|
161
|
-
|
|
162
|
-
READ_KEY_LIST = KEY_LIST + (["5", "20", "100", "nonexistent_key", "-"])
|
|
163
|
-
|
|
164
|
-
STATE_LIST = [
|
|
165
|
-
{key: MockStoreItem({"id": key, "value": f"value{key}"}) for key in subset}
|
|
166
|
-
for subset in subsets(KEY_LIST, 3)
|
|
167
|
-
if len(subset) == 3
|
|
168
|
-
]
|
|
169
|
-
|
|
170
|
-
@pytest.fixture(params=[dict()] + STATE_LIST)
|
|
171
|
-
def initial_state(self, request):
|
|
172
|
-
return request.param
|
|
173
|
-
|
|
174
|
-
@pytest.fixture(params=KEY_LIST)
|
|
175
|
-
def key(self, request):
|
|
176
|
-
return request.param
|
|
177
|
-
|
|
178
|
-
@pytest.fixture(
|
|
179
|
-
params=[subset for subset in subsets(READ_KEY_LIST, 2) if len(subset) == 2]
|
|
180
|
-
)
|
|
181
|
-
def keys(self, request):
|
|
182
|
-
return request.param
|
|
183
|
-
|
|
184
|
-
@pytest.fixture(params=subsets(KEY_LIST, 2))
|
|
185
|
-
def changes(self, request):
|
|
186
|
-
changes_obj = {}
|
|
187
|
-
keys = request.param
|
|
188
|
-
changes_obj["new_key"] = MockStoreItemB(
|
|
189
|
-
{"field": "new_value_for_new_key"}, True
|
|
190
|
-
)
|
|
191
|
-
for i, key in enumerate(keys):
|
|
192
|
-
if i % 2 == 0:
|
|
193
|
-
changes_obj[key] = MockStoreItemB(
|
|
194
|
-
{"data": f"value{key}"}, (i // 2) % 2 == 0
|
|
195
|
-
)
|
|
196
|
-
else:
|
|
197
|
-
changes_obj[key] = MockStoreItem(
|
|
198
|
-
{"id": key, "value": f"new_value_for_{key}"}
|
|
199
|
-
)
|
|
200
|
-
changes_obj["new_key_2"] = MockStoreItem({"field": "new_value_for_new_key_2"})
|
|
201
|
-
return changes_obj
|
|
202
|
-
|
|
203
|
-
|
|
204
|
-
class CRUDStorageTests(StorageTestsCommon):
    """Tests for Storage implementations that support CRUD operations.

    To use, subclass and implement the `storage` method.

    Every test mirrors each operation against a ``StorageBaseline`` reference
    model, then asserts the storage under test matches the baseline and that
    the caller-supplied ``initial_state`` dict was never mutated.
    """

    async def storage(self, initial_data=None, existing=False) -> Storage:
        """Return a Storage instance to be tested.

        :param initial_data: The initial data to populate the storage with.
        :param existing: If True, the storage instance should connect to an existing store.
        """
        raise NotImplementedError("Subclasses must implement this")

    @pytest.mark.asyncio
    async def test_read_individual(self, initial_state, key):
        """Reading one key matches the baseline and leaves the store untouched."""
        initial_state_copy = my_deepcopy(initial_state)
        baseline_storage = StorageBaseline(initial_state)
        storage = await self.storage(initial_state)
        expected = baseline_storage.read([key])
        actual = await storage.read([key], target_cls=MockStoreItem)
        assert actual == expected
        assert await baseline_storage.equals(storage)
        assert initial_state == initial_state_copy

    @pytest.mark.asyncio
    async def test_read(self, initial_state, keys):
        """Reading several keys at once matches the baseline."""
        initial_state_copy = my_deepcopy(initial_state)
        baseline_storage = StorageBaseline(initial_state)
        storage = await self.storage(initial_state)
        expected = baseline_storage.read(keys)
        actual = await storage.read(keys, target_cls=MockStoreItem)
        assert actual == expected
        assert await baseline_storage.equals(storage)
        assert initial_state == initial_state_copy

    @pytest.mark.asyncio
    async def test_read_missing_key(self, initial_state):
        """Absent keys are silently omitted from the read result, not errors."""
        initial_state_copy = my_deepcopy(initial_state)
        baseline_storage = StorageBaseline(initial_state)
        storage = await self.storage(initial_state)
        keys = ["5", "20", "100", "nonexistent_key", "-"]
        expected = baseline_storage.read(keys)
        actual = await storage.read(keys, target_cls=MockStoreItem)
        assert actual == expected
        assert await baseline_storage.equals(storage)
        assert initial_state == initial_state_copy

    @pytest.mark.asyncio
    async def test_read_errors(self, initial_state):
        """read() rejects empty/None key lists, empty keys, and a None target_cls."""
        initial_state_copy = my_deepcopy(initial_state)
        storage = await self.storage(initial_state)
        with pytest.raises(ValueError):
            await storage.read([], target_cls=MockStoreItem)
        with pytest.raises(ValueError):
            await storage.read(None, target_cls=MockStoreItem)
        with pytest.raises(ValueError):
            await storage.read([""], target_cls=MockStoreItem)
        with pytest.raises(ValueError):
            await storage.read(["key"], target_cls=None)
        assert initial_state == initial_state_copy

    @pytest.mark.asyncio
    async def test_write_individual(self, initial_state, key):
        """Writing a single item keeps storage and baseline in sync."""
        initial_state_copy = my_deepcopy(initial_state)
        baseline_storage = StorageBaseline(initial_state)
        storage = await self.storage(initial_state)
        change = {key: MockStoreItem({key: f"new_value_for_{key}!"})}
        baseline_storage.write(change)
        await storage.write(change)
        assert await baseline_storage.equals(storage)
        assert initial_state == initial_state_copy

    @pytest.mark.asyncio
    async def test_write_individual_different_target_cls(self, initial_state, key):
        """Overwriting a key with a different StoreItem subclass round-trips."""
        initial_state_copy = my_deepcopy(initial_state)
        baseline_storage = StorageBaseline(initial_state)
        storage = await self.storage(initial_state)
        change = {
            key: MockStoreItemB({key: f"new_value_for_{key}!"}, other_field=False)
        }
        baseline_storage.write(change)
        await storage.write(change)
        assert await baseline_storage.equals(storage)
        change = {key: MockStoreItemB({key: f"new_{key}"}, other_field=True)}
        baseline_storage.write(change)
        await storage.write(change)
        assert await baseline_storage.equals(storage)
        assert initial_state == initial_state_copy

    @pytest.mark.asyncio
    async def test_write_same_values(self, initial_state):
        """Rewriting the existing values is a no-op observable-state-wise."""
        if not initial_state:
            # Nothing to rewrite when the store starts empty.
            return
        initial_state_copy = my_deepcopy(initial_state)
        baseline_storage = StorageBaseline(initial_state)
        storage = await self.storage(initial_state)
        changes = {key: value for key, value in initial_state.items()}
        baseline_storage.write(changes)
        await storage.write(changes)
        assert await baseline_storage.equals(storage)
        assert initial_state == initial_state_copy

    @pytest.mark.asyncio
    async def test_write(self, initial_state, changes):
        """Batch writes, then re-writing the initial state, track the baseline."""
        initial_state_copy = my_deepcopy(initial_state)
        baseline_storage = StorageBaseline(initial_state)
        storage = await self.storage(initial_state)
        baseline_storage.write(changes)
        await storage.write(changes)
        assert await baseline_storage.equals(storage)
        baseline_storage.write(initial_state)
        if initial_state:
            # write() rejects empty payloads, so only mirror non-empty states.
            await storage.write(initial_state)
        assert await baseline_storage.equals(storage)
        assert initial_state == initial_state_copy

    @pytest.mark.asyncio
    async def test_write_errors(self, initial_state):
        """write() rejects empty and None payloads without altering the store."""
        initial_state_copy = my_deepcopy(initial_state)
        baseline_storage = StorageBaseline(initial_state)
        storage = await self.storage(initial_state)
        with pytest.raises(ValueError):
            await storage.write({})
        with pytest.raises(ValueError):
            await storage.write(None)
        assert await baseline_storage.equals(storage)
        assert initial_state == initial_state_copy

    @pytest.mark.asyncio
    async def test_delete_individual(self, initial_state, key):
        """Deleting one key keeps storage and baseline in sync."""
        initial_state_copy = my_deepcopy(initial_state)
        baseline_storage = StorageBaseline(initial_state)
        storage = await self.storage(initial_state)
        baseline_storage.delete([key])
        await storage.delete([key])
        assert await baseline_storage.equals(storage)
        assert initial_state == initial_state_copy

    @pytest.mark.asyncio
    async def test_delete(self, initial_state, keys):
        """Deleting several keys at once keeps storage and baseline in sync."""
        initial_state_copy = my_deepcopy(initial_state)
        baseline_storage = StorageBaseline(initial_state)
        storage = await self.storage(initial_state)
        baseline_storage.delete(keys)
        await storage.delete(keys)
        assert await baseline_storage.equals(storage)
        assert initial_state == initial_state_copy

    @pytest.mark.asyncio
    async def test_delete_missing_key(self, initial_state):
        """Deleting keys that do not exist is a silent no-op."""
        initial_state_copy = my_deepcopy(initial_state)
        baseline_storage = StorageBaseline(initial_state)
        storage = await self.storage(initial_state)
        keys = ["5", "20", "100", "nonexistent_key", "-"]
        baseline_storage.delete(keys)
        await storage.delete(keys)
        assert await baseline_storage.equals(storage)
        assert initial_state == initial_state_copy

    @pytest.mark.asyncio
    async def test_delete_errors(self, initial_state):
        """delete() rejects empty and None key collections.

        Fixed: this test previously called ``storage.read`` (a copy/paste slip
        from test_read_errors), so delete()'s argument validation was never
        actually exercised.
        """
        initial_state_copy = my_deepcopy(initial_state)
        storage = await self.storage(initial_state)
        with pytest.raises(ValueError):
            await storage.delete([])
        with pytest.raises(ValueError):
            await storage.delete(None)
        assert initial_state == initial_state_copy

    @pytest.mark.asyncio
    async def test_flow(self):
        """End-to-end scenario: reads, mixed-class writes, deletes, persistence."""
        baseline_storage = StorageBaseline()
        storage = await self.storage()

        # Reading from an empty store returns an empty result.
        res = await storage.read(["key"], target_cls=MockStoreItemB)
        assert len(res) == 0
        assert await baseline_storage.equals(storage)

        changes = {
            "key_a": MockStoreItem({"id": "key_a", "value": "value_a"}),
            "key_b": MockStoreItemB(
                {"id": "key_b", "value": "value_b"}, other_field=False
            ),
        }
        changes_copy = my_deepcopy(changes)

        baseline_storage.write(changes)
        await storage.write(changes)

        assert (
            await storage.read(["key_a"], target_cls=MockStoreItem)
        ) == baseline_storage.read(["key_a"])
        assert (
            await storage.read(["key_b"], target_cls=MockStoreItemB)
        ) == baseline_storage.read(["key_b"])
        # write() must not mutate the caller's payload.
        assert changes_copy == changes

        baseline_storage.delete(["key_a"])
        await storage.delete(["key_a"])
        assert await baseline_storage.equals(storage)

        # Overwrite key_b with the parent item class.
        change = {"key_b": MockStoreItem({"id": "key_b", "value": "new_value_b"})}
        baseline_storage.write(change)
        await storage.write(change)

        assert await baseline_storage.equals(storage)
        assert (
            await storage.read(["key_b"], target_cls=MockStoreItem)
        ) == baseline_storage.read(["key_b"])

        with pytest.raises(ValueError):
            await storage.read([], target_cls=MockStoreItem)
        with pytest.raises(ValueError):
            await storage.read(["key_b"], target_cls=None)

        change = {
            "key_c": MockStoreItemB(
                {"id": "key_c", "value": "value_c"}, other_field=True
            )
        }
        baseline_storage.write(change)
        await storage.write(change)
        assert (
            await storage.read(["key_a", "key_b"], target_cls=MockStoreItem)
        ) == baseline_storage.read(["key_a", "key_b"])
        assert (
            await storage.read(["key_a", "key_c"], target_cls=MockStoreItemB)
        ) == baseline_storage.read(["key_a", "key_c"])

        # The same stored record can be deserialized as either class.
        item_parent_class = (await storage.read(["key_c"], target_cls=MockStoreItem))[
            "key_c"
        ]
        item_child_class = (await storage.read(["key_c"], target_cls=MockStoreItemB))[
            "key_c"
        ]
        # NOTE(review): indexing .data with 0 looks suspicious for a dict-backed
        # item — confirm MockStoreItem.data's shape before relying on this.
        assert item_parent_class.data[0] == item_child_class.data
        assert item_child_class.other_field is True

        with pytest.raises(ValueError):
            await storage.write({})
        with pytest.raises(Exception):
            # key_b was last written as MockStoreItem; deserializing it as the
            # stricter subclass should fail.
            await storage.read(["key_b"], target_cls=MockStoreItemB)
        assert await baseline_storage.equals(storage)

        if not isinstance(storage, MemoryStorage):
            # if not memory storage, then items should persist
            del storage
            gc.collect()
            storage_alt = await self.storage(existing=True)
            assert await baseline_storage.equals(storage_alt)
|
|
454
|
-
|
|
455
|
-
|
|
456
|
-
class QuickCRUDStorageTests(CRUDStorageTests):
    """Reduced set of permutations for quicker tests. Useful for debugging.

    Overrides the parametrization constants and fixtures with a two-key set so
    the full CRUD suite runs with far fewer test-case combinations.
    """

    KEY_LIST = ["\\?/#\t\n\r*", "test.txt"]
    READ_KEY_LIST = KEY_LIST + ["nonexistent_key"]
    STATE_LIST = [
        {key: MockStoreItem({"id": key, "value": f"value{key}"}) for key in KEY_LIST}
    ]

    @pytest.fixture(params=STATE_LIST)
    def initial_state(self, request):
        """Single canned starting state built from the reduced key list."""
        return request.param

    @pytest.fixture(params=KEY_LIST)
    def key(self, request):
        """Yield each key from the reduced key list."""
        return request.param

    @pytest.fixture(params=[KEY_LIST])
    def keys(self, request):
        """Yield the whole reduced key list as the one multi-key case."""
        return request.param

    @pytest.fixture(params=subsets(KEY_LIST, 2))
    def changes(self, request):
        """Build a mixed write payload: two new keys plus updates to a key subset."""
        selected_keys = request.param
        payload = {}
        payload["new_key"] = MockStoreItemB(
            {"field": "new_value_for_new_key"}, True
        )
        for index, store_key in enumerate(selected_keys):
            if index % 2 == 0:
                payload[store_key] = MockStoreItemB(
                    {"data": f"value{store_key}"}, (index // 2) % 2 == 0
                )
            else:
                payload[store_key] = MockStoreItem(
                    {"id": store_key, "value": f"new_value_for_{store_key}"}
                )
        payload["new_key_2"] = MockStoreItem({"field": "new_value_for_new_key_2"})
        return payload
|
|
497
|
-
|
|
498
|
-
|
|
499
|
-
def debug_print(*args):
    """Print debug information clearly separated in the console."""
    spacer = "\n" * 2
    print(spacer)
    print("--- DEBUG ---")
    for item in args:
        print(spacer)
        print(item)
        print(spacer)
    print("--- ----- ---")
    print(spacer)
|
|
File without changes
|