AbstractMemory 0.0.1__py3-none-any.whl → 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- abstractmemory/__init__.py +744 -31
- abstractmemory/cognitive/__init__.py +1 -0
- abstractmemory/components/__init__.py +1 -0
- abstractmemory/components/core.py +112 -0
- abstractmemory/components/episodic.py +68 -0
- abstractmemory/components/semantic.py +102 -0
- abstractmemory/components/working.py +50 -0
- abstractmemory/core/__init__.py +1 -0
- abstractmemory/core/interfaces.py +95 -0
- abstractmemory/core/temporal.py +100 -0
- abstractmemory/graph/__init__.py +1 -0
- abstractmemory/graph/knowledge_graph.py +178 -0
- abstractmemory/simple.py +151 -0
- abstractmemory/storage/__init__.py +16 -0
- abstractmemory/storage/dual_manager.py +278 -0
- abstractmemory/storage/lancedb_storage.py +425 -0
- abstractmemory/storage/markdown_storage.py +447 -0
- abstractmemory-0.1.0.dist-info/METADATA +331 -0
- abstractmemory-0.1.0.dist-info/RECORD +22 -0
- {abstractmemory-0.0.1.dist-info → abstractmemory-0.1.0.dist-info}/licenses/LICENSE +4 -1
- abstractmemory-0.0.1.dist-info/METADATA +0 -94
- abstractmemory-0.0.1.dist-info/RECORD +0 -6
- {abstractmemory-0.0.1.dist-info → abstractmemory-0.1.0.dist-info}/WHEEL +0 -0
- {abstractmemory-0.0.1.dist-info → abstractmemory-0.1.0.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,447 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Markdown Storage Backend for human-readable, observable AI memory.
|
|
3
|
+
Provides complete transparency into AI memory evolution.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
import json
import os
import re
import shutil
import uuid
from datetime import datetime
from pathlib import Path
from typing import Any, Dict, List, Optional

from ..core.interfaces import IStorage
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class MarkdownStorage(IStorage):
|
|
18
|
+
"""
|
|
19
|
+
Human-readable markdown storage with full observability.
|
|
20
|
+
|
|
21
|
+
Directory structure:
|
|
22
|
+
memory/
|
|
23
|
+
├── verbatim/{user}/{yyyy}/{mm}/{dd}/{HH}-{MM}-{SS}_{topic}.md
|
|
24
|
+
├── experiential/{yyyy}/{mm}/{dd}/{HH}-{MM}-{SS}_reflection.md
|
|
25
|
+
├── links/{yyyy}/{mm}/{dd}/{interaction_id}_to_{note_id}.json
|
|
26
|
+
├── core/{yyyy}-{mm}-{dd}_snapshot.md
|
|
27
|
+
├── semantic/facts_{yyyy}-{mm}.md
|
|
28
|
+
└── index.json
|
|
29
|
+
"""
|
|
30
|
+
|
|
31
|
+
def __init__(self, base_path: str):
    """Set up markdown storage rooted at *base_path*.

    Creates the on-disk directory layout, then loads the master index
    from ``index.json`` (building and persisting a fresh one when the
    file does not exist yet).
    """
    self.base_path = Path(base_path)
    self.index_file = self.base_path / "index.json"

    # Make sure the directory layout exists before touching the index.
    self._create_directories()

    # Remember whether an index file was already present so a brand-new
    # index gets written to disk immediately.
    had_index = self.index_file.exists()
    self.index = self._load_index()
    if not had_index:
        self._save_index()
|
|
46
|
+
|
|
47
|
+
def _create_directories(self):
|
|
48
|
+
"""Create the directory structure"""
|
|
49
|
+
directories = [
|
|
50
|
+
"verbatim", "experiential", "links",
|
|
51
|
+
"core", "semantic"
|
|
52
|
+
]
|
|
53
|
+
|
|
54
|
+
for directory in directories:
|
|
55
|
+
(self.base_path / directory).mkdir(parents=True, exist_ok=True)
|
|
56
|
+
|
|
57
|
+
def _load_index(self) -> Dict:
|
|
58
|
+
"""Load the master index"""
|
|
59
|
+
if self.index_file.exists():
|
|
60
|
+
try:
|
|
61
|
+
with open(self.index_file, 'r', encoding='utf-8') as f:
|
|
62
|
+
return json.load(f)
|
|
63
|
+
except (json.JSONDecodeError, IOError):
|
|
64
|
+
pass
|
|
65
|
+
|
|
66
|
+
# Create new index
|
|
67
|
+
return {
|
|
68
|
+
"created": datetime.now().isoformat(),
|
|
69
|
+
"last_updated": datetime.now().isoformat(),
|
|
70
|
+
"interactions": {},
|
|
71
|
+
"experiential_notes": {},
|
|
72
|
+
"links": {},
|
|
73
|
+
"topics": set(),
|
|
74
|
+
"users": set()
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
def _save_index(self):
|
|
78
|
+
"""Save the master index"""
|
|
79
|
+
self.index["last_updated"] = datetime.now().isoformat()
|
|
80
|
+
|
|
81
|
+
# Convert sets to lists for JSON serialization
|
|
82
|
+
index_copy = self.index.copy()
|
|
83
|
+
index_copy["topics"] = list(self.index["topics"]) if isinstance(self.index["topics"], set) else self.index["topics"]
|
|
84
|
+
index_copy["users"] = list(self.index["users"]) if isinstance(self.index["users"], set) else self.index["users"]
|
|
85
|
+
|
|
86
|
+
with open(self.index_file, 'w', encoding='utf-8') as f:
|
|
87
|
+
json.dump(index_copy, f, indent=2, ensure_ascii=False)
|
|
88
|
+
|
|
89
|
+
def _extract_topic(self, user_input: str, agent_response: str) -> str:
|
|
90
|
+
"""Extract main topic from interaction"""
|
|
91
|
+
# Simple topic extraction - could be enhanced with NLP
|
|
92
|
+
text = f"{user_input} {agent_response}".lower()
|
|
93
|
+
|
|
94
|
+
# Look for key terms
|
|
95
|
+
topics = []
|
|
96
|
+
if "python" in text:
|
|
97
|
+
topics.append("python")
|
|
98
|
+
if "code" in text or "programming" in text:
|
|
99
|
+
topics.append("coding")
|
|
100
|
+
if "learn" in text or "teach" in text:
|
|
101
|
+
topics.append("learning")
|
|
102
|
+
if "help" in text or "assist" in text:
|
|
103
|
+
topics.append("assistance")
|
|
104
|
+
if "memory" in text or "remember" in text:
|
|
105
|
+
topics.append("memory")
|
|
106
|
+
|
|
107
|
+
# Default topic from first few words of user input
|
|
108
|
+
if not topics:
|
|
109
|
+
words = user_input.split()[:3]
|
|
110
|
+
topic = "_".join(word.lower().strip(".,!?") for word in words if word.isalpha())
|
|
111
|
+
topics.append(topic or "general")
|
|
112
|
+
|
|
113
|
+
return topics[0]
|
|
114
|
+
|
|
115
|
+
def _get_date_path(self, timestamp: datetime) -> str:
|
|
116
|
+
"""Get date-based path component"""
|
|
117
|
+
return f"{timestamp.year:04d}/{timestamp.month:02d}/{timestamp.day:02d}"
|
|
118
|
+
|
|
119
|
+
def _get_time_prefix(self, timestamp: datetime) -> str:
|
|
120
|
+
"""Get time-based filename prefix"""
|
|
121
|
+
return f"{timestamp.hour:02d}-{timestamp.minute:02d}-{timestamp.second:02d}"
|
|
122
|
+
|
|
123
|
+
def save_interaction(self, user_id: str, timestamp: datetime,
                     user_input: str, agent_response: str,
                     topic: str, metadata: Optional[Dict] = None) -> str:
    """Persist one verbatim user/agent exchange as a markdown file.

    Derives a topic when none is given, writes the markdown under
    ``verbatim/{user}/{yyyy}/{mm}/{dd}/``, registers the interaction in
    the master index, and returns the generated interaction id.
    """
    interaction_id = f"int_{uuid.uuid4().hex[:8]}"

    # Derive a topic when the caller did not supply one.
    topic = topic or self._extract_topic(user_input, agent_response)

    # verbatim/{user}/{yyyy}/{mm}/{dd}/{HH-MM-SS}_{topic}_{id}.md
    target = (
        self.base_path
        / "verbatim"
        / user_id
        / self._get_date_path(timestamp)
        / f"{self._get_time_prefix(timestamp)}_{topic}_{interaction_id}.md"
    )
    target.parent.mkdir(parents=True, exist_ok=True)

    markdown = self._create_interaction_markdown(
        interaction_id, user_id, timestamp, user_input,
        agent_response, topic, metadata
    )
    with open(target, 'w', encoding='utf-8') as f:
        f.write(markdown)

    # Register the interaction in the master index.
    self.index["interactions"][interaction_id] = {
        "file_path": str(target.relative_to(self.base_path)),
        "user_id": user_id,
        "timestamp": timestamp.isoformat(),
        "topic": topic,
        "linked_notes": [],
    }

    # topics/users may have been loaded from JSON as lists; coerce to
    # sets before adding.
    for field in ("topics", "users"):
        if not isinstance(self.index[field], set):
            self.index[field] = set(self.index[field])
    self.index["topics"].add(topic)
    self.index["users"].add(user_id)
    self._save_index()

    return interaction_id
|
|
173
|
+
|
|
174
|
+
def _create_interaction_markdown(self, interaction_id: str, user_id: str,
|
|
175
|
+
timestamp: datetime, user_input: str,
|
|
176
|
+
agent_response: str, topic: str,
|
|
177
|
+
metadata: Optional[Dict]) -> str:
|
|
178
|
+
"""Create markdown content for interaction"""
|
|
179
|
+
|
|
180
|
+
content = f"""# Interaction: {topic}
|
|
181
|
+
|
|
182
|
+
**ID**: `{interaction_id}`
|
|
183
|
+
**Date**: {timestamp.isoformat()}
|
|
184
|
+
**User**: {user_id}
|
|
185
|
+
**Topic**: {topic}
|
|
186
|
+
|
|
187
|
+
"""
|
|
188
|
+
|
|
189
|
+
if metadata:
|
|
190
|
+
content += "## Metadata\n\n"
|
|
191
|
+
for key, value in metadata.items():
|
|
192
|
+
content += f"- **{key}**: {value}\n"
|
|
193
|
+
content += "\n"
|
|
194
|
+
|
|
195
|
+
content += f"""## User Input
|
|
196
|
+
|
|
197
|
+
{user_input}
|
|
198
|
+
|
|
199
|
+
## Agent Response
|
|
200
|
+
|
|
201
|
+
{agent_response}
|
|
202
|
+
|
|
203
|
+
## Links
|
|
204
|
+
|
|
205
|
+
*Linked experiential notes will appear here*
|
|
206
|
+
|
|
207
|
+
---
|
|
208
|
+
*Generated by AbstractMemory - {timestamp.isoformat()}*
|
|
209
|
+
"""
|
|
210
|
+
|
|
211
|
+
return content
|
|
212
|
+
|
|
213
|
+
def save_experiential_note(self, timestamp: datetime, reflection: str,
                           interaction_id: str, note_type: str = "reflection",
                           metadata: Optional[Dict] = None) -> str:
    """Persist an AI experiential note as markdown and index it.

    The note records which interaction triggered it. Returns the
    generated note id.
    """
    note_id = f"note_{uuid.uuid4().hex[:8]}"

    # experiential/{yyyy}/{mm}/{dd}/{HH-MM-SS}_{type}_{id}.md
    target = (
        self.base_path
        / "experiential"
        / self._get_date_path(timestamp)
        / f"{self._get_time_prefix(timestamp)}_{note_type}_{note_id}.md"
    )
    target.parent.mkdir(parents=True, exist_ok=True)

    markdown = self._create_experiential_markdown(
        note_id, timestamp, reflection, interaction_id, note_type, metadata
    )
    with open(target, 'w', encoding='utf-8') as f:
        f.write(markdown)

    # Register the note in the master index.
    self.index["experiential_notes"][note_id] = {
        "file_path": str(target.relative_to(self.base_path)),
        "timestamp": timestamp.isoformat(),
        "note_type": note_type,
        "linked_interaction": interaction_id,
    }
    self._save_index()

    return note_id
|
|
248
|
+
|
|
249
|
+
def _create_experiential_markdown(self, note_id: str, timestamp: datetime,
|
|
250
|
+
reflection: str, interaction_id: str,
|
|
251
|
+
note_type: str, metadata: Optional[Dict]) -> str:
|
|
252
|
+
"""Create markdown content for experiential note"""
|
|
253
|
+
|
|
254
|
+
content = f"""# AI {note_type.title()}: {note_id}
|
|
255
|
+
|
|
256
|
+
**Note ID**: `{note_id}`
|
|
257
|
+
**Date**: {timestamp.isoformat()}
|
|
258
|
+
**Type**: {note_type}
|
|
259
|
+
**Triggered by**: [Interaction {interaction_id}](../../verbatim/.../.../{interaction_id}.md)
|
|
260
|
+
|
|
261
|
+
"""
|
|
262
|
+
|
|
263
|
+
if metadata:
|
|
264
|
+
content += "## Context\n\n"
|
|
265
|
+
for key, value in metadata.items():
|
|
266
|
+
content += f"- **{key}**: {value}\n"
|
|
267
|
+
content += "\n"
|
|
268
|
+
|
|
269
|
+
content += f"""## Reflection
|
|
270
|
+
|
|
271
|
+
{reflection}
|
|
272
|
+
|
|
273
|
+
## Insights
|
|
274
|
+
|
|
275
|
+
*This section could contain extracted insights, patterns, or learnings*
|
|
276
|
+
|
|
277
|
+
---
|
|
278
|
+
*AI experiential note generated by AbstractMemory - {timestamp.isoformat()}*
|
|
279
|
+
"""
|
|
280
|
+
|
|
281
|
+
return content
|
|
282
|
+
|
|
283
|
+
def link_interaction_to_note(self, interaction_id: str, note_id: str) -> None:
    """Record a bidirectional link between an interaction and a note.

    When the interaction is known, writes a per-link JSON file under
    ``links/{yyyy}/{mm}/{dd}/`` and appends the note to the
    interaction's ``linked_notes``. The link is always recorded in the
    master index, which is then persisted.
    """
    link_data = {
        "interaction_id": interaction_id,
        "note_id": note_id,
        "created": datetime.now().isoformat(),
        "type": "bidirectional",
    }

    known = self.index["interactions"].get(interaction_id)
    if known is not None:
        # links/{yyyy}/{mm}/{dd}/{interaction}_to_{note}.json
        when = datetime.fromisoformat(known["timestamp"])
        link_file = (
            self.base_path
            / "links"
            / self._get_date_path(when)
            / f"{interaction_id}_to_{note_id}.json"
        )
        link_file.parent.mkdir(parents=True, exist_ok=True)
        with open(link_file, 'w', encoding='utf-8') as f:
            json.dump(link_data, f, indent=2)

        known["linked_notes"].append(note_id)

    self.index["links"][f"{interaction_id}_{note_id}"] = link_data
    self._save_index()
|
|
312
|
+
|
|
313
|
+
def search_interactions(self, query: str, user_id: Optional[str] = None,
                        start_date: Optional[datetime] = None,
                        end_date: Optional[datetime] = None) -> List[Dict]:
    """Search stored interactions by topic and file content.

    Filters by user and date range first, then matches *query*
    (case-insensitively) against the indexed topic and, failing that,
    against the markdown file's text on disk. Results are sorted
    newest-first.
    """
    needle = query.lower()
    matches: List[Dict] = []

    def _hit(entry_id, entry, how):
        # Shape one search result from an index entry.
        return {
            "id": entry_id,
            "timestamp": entry["timestamp"],
            "user_id": entry["user_id"],
            "topic": entry["topic"],
            "file_path": entry["file_path"],
            "match_type": how,
        }

    for entry_id, entry in self.index["interactions"].items():
        if user_id and entry["user_id"] != user_id:
            continue

        when = datetime.fromisoformat(entry["timestamp"])
        if (start_date and when < start_date) or (end_date and when > end_date):
            continue

        # Cheap topic match first.
        if needle in entry["topic"].lower():
            matches.append(_hit(entry_id, entry, "topic"))
            continue

        # Fall back to scanning the markdown body on disk.
        md_file = self.base_path / entry["file_path"]
        if md_file.exists():
            try:
                if needle in md_file.read_text(encoding='utf-8').lower():
                    matches.append(_hit(entry_id, entry, "content"))
            except IOError:
                # Unreadable file: skip rather than fail the search.
                pass

    matches.sort(key=lambda m: m["timestamp"], reverse=True)
    return matches
|
|
366
|
+
|
|
367
|
+
# IStorage interface implementation
|
|
368
|
+
def save(self, key: str, value: Any) -> None:
    """Generic save for IStorage compatibility.

    Keys shaped like ``.../name`` are treated as memory-component
    snapshots; any other key is silently ignored.
    """
    if "/" not in key:
        return
    self.save_memory_component(key.rsplit("/", 1)[-1], value)
|
|
374
|
+
|
|
375
|
+
def load(self, key: str) -> Any:
    """Generic load for IStorage compatibility.

    Returns the latest snapshot for ``.../name`` keys, else ``None``.
    """
    if "/" in key:
        return self.load_memory_component(key.rsplit("/", 1)[-1])
    return None
|
|
381
|
+
|
|
382
|
+
def exists(self, key: str) -> bool:
    """Return True when a latest snapshot exists for a component key."""
    if "/" not in key:
        return False
    component = key.rsplit("/", 1)[-1]
    return (self.base_path / "core" / f"{component}_latest.json").exists()
|
|
389
|
+
|
|
390
|
+
def save_memory_component(self, component_name: str, component_data: Any) -> None:
    """Save a memory component as a dated, human-readable JSON snapshot.

    Writes ``core/{name}_{yyyy-mm-dd}.json`` and points
    ``core/{name}_latest.json`` at it.

    Fixes over the original:
    - a *dangling* latest symlink was never unlinked (``exists()``
      follows symlinks), making ``symlink_to`` raise FileExistsError;
    - symlink creation can raise OSError/NotImplementedError on
      platforms without symlink support (e.g. Windows without
      privileges) — fall back to writing a regular copy so
      ``load_memory_component`` still works.
    """
    timestamp = datetime.now()
    date_str = timestamp.strftime("%Y-%m-%d")

    json_file = self.base_path / "core" / f"{component_name}_{date_str}.json"
    json_file.parent.mkdir(parents=True, exist_ok=True)

    # Objects are serialized via their attribute dict; plain data as-is.
    if hasattr(component_data, '__dict__'):
        data = component_data.__dict__
    else:
        data = component_data

    with open(json_file, 'w', encoding='utf-8') as f:
        json.dump(data, f, indent=2, default=str, ensure_ascii=False)

    # Refresh the "latest" pointer. is_symlink() catches dangling links
    # that exists() would miss.
    latest_file = self.base_path / "core" / f"{component_name}_latest.json"
    if latest_file.exists() or latest_file.is_symlink():
        latest_file.unlink()
    try:
        latest_file.symlink_to(json_file.name)
    except (OSError, NotImplementedError):
        # Symlinks unsupported here — fall back to a plain copy.
        shutil.copyfile(json_file, latest_file)
|
|
413
|
+
|
|
414
|
+
def load_memory_component(self, component_name: str) -> Optional[Any]:
    """Return the latest snapshot for *component_name*, or ``None``.

    A missing, corrupt, or unreadable snapshot is treated as absent.
    """
    latest_file = self.base_path / "core" / f"{component_name}_latest.json"
    if not latest_file.exists():
        return None
    try:
        with open(latest_file, 'r', encoding='utf-8') as f:
            return json.load(f)
    except (json.JSONDecodeError, IOError):
        return None
|
|
426
|
+
|
|
427
|
+
def get_stats(self) -> Dict[str, Any]:
    """Summarize storage contents and on-disk footprint."""
    index = self.index
    return {
        "total_interactions": len(index["interactions"]),
        "total_notes": len(index["experiential_notes"]),
        "total_links": len(index["links"]),
        "unique_users": len(index["users"]),
        "unique_topics": len(index["topics"]),
        "base_path": str(self.base_path),
        # Bytes -> megabytes.
        "storage_size_mb": self._get_directory_size() / (1024 * 1024),
    }
|
|
438
|
+
|
|
439
|
+
def _get_directory_size(self) -> int:
|
|
440
|
+
"""Get total size of storage directory in bytes"""
|
|
441
|
+
total_size = 0
|
|
442
|
+
for dirpath, dirnames, filenames in os.walk(self.base_path):
|
|
443
|
+
for filename in filenames:
|
|
444
|
+
filepath = os.path.join(dirpath, filename)
|
|
445
|
+
if os.path.exists(filepath):
|
|
446
|
+
total_size += os.path.getsize(filepath)
|
|
447
|
+
return total_size
|