context-use 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (125) hide show
  1. context_use-0.1.0/.gitignore +23 -0
  2. context_use-0.1.0/CHANGELOG.md +125 -0
  3. context_use-0.1.0/PKG-INFO +176 -0
  4. context_use-0.1.0/README.md +154 -0
  5. context_use-0.1.0/context_use/__init__.py +24 -0
  6. context_use-0.1.0/context_use/activitystreams/__init__.py +170 -0
  7. context_use-0.1.0/context_use/activitystreams/activities.py +280 -0
  8. context_use-0.1.0/context_use/activitystreams/actors.py +55 -0
  9. context_use-0.1.0/context_use/activitystreams/core.py +176 -0
  10. context_use-0.1.0/context_use/activitystreams/links.py +19 -0
  11. context_use-0.1.0/context_use/activitystreams/objects.py +147 -0
  12. context_use-0.1.0/context_use/agent/__init__.py +0 -0
  13. context_use-0.1.0/context_use/agent/backend.py +29 -0
  14. context_use-0.1.0/context_use/agent/protocol.py +57 -0
  15. context_use-0.1.0/context_use/agent/skill.py +55 -0
  16. context_use-0.1.0/context_use/agent/skills/synthesise.md +108 -0
  17. context_use-0.1.0/context_use/agent/skills/user_profile.md +57 -0
  18. context_use-0.1.0/context_use/agent/system.md +30 -0
  19. context_use-0.1.0/context_use/agent/tools.py +266 -0
  20. context_use-0.1.0/context_use/batch/__init__.py +61 -0
  21. context_use-0.1.0/context_use/batch/factory.py +85 -0
  22. context_use-0.1.0/context_use/batch/grouper.py +104 -0
  23. context_use-0.1.0/context_use/batch/manager.py +174 -0
  24. context_use-0.1.0/context_use/batch/policy.py +33 -0
  25. context_use-0.1.0/context_use/batch/registry.py +31 -0
  26. context_use-0.1.0/context_use/batch/runner.py +70 -0
  27. context_use-0.1.0/context_use/batch/states.py +107 -0
  28. context_use-0.1.0/context_use/cli/__init__.py +3 -0
  29. context_use-0.1.0/context_use/cli/app.py +114 -0
  30. context_use-0.1.0/context_use/cli/base.py +438 -0
  31. context_use-0.1.0/context_use/cli/commands/__init__.py +22 -0
  32. context_use-0.1.0/context_use/cli/commands/agent.py +158 -0
  33. context_use-0.1.0/context_use/cli/commands/config.py +287 -0
  34. context_use-0.1.0/context_use/cli/commands/ingest.py +63 -0
  35. context_use-0.1.0/context_use/cli/commands/memories.py +386 -0
  36. context_use-0.1.0/context_use/cli/commands/pipeline.py +244 -0
  37. context_use-0.1.0/context_use/cli/commands/reset.py +47 -0
  38. context_use-0.1.0/context_use/cli/output.py +92 -0
  39. context_use-0.1.0/context_use/config.py +228 -0
  40. context_use-0.1.0/context_use/etl/core/__init__.py +13 -0
  41. context_use-0.1.0/context_use/etl/core/exceptions.py +10 -0
  42. context_use-0.1.0/context_use/etl/core/pipe.py +142 -0
  43. context_use-0.1.0/context_use/etl/core/types.py +24 -0
  44. context_use-0.1.0/context_use/etl/payload/__init__.py +78 -0
  45. context_use-0.1.0/context_use/etl/payload/core.py +26 -0
  46. context_use-0.1.0/context_use/etl/payload/models.py +431 -0
  47. context_use-0.1.0/context_use/ext/__init__.py +1 -0
  48. context_use-0.1.0/context_use/ext/adk/__init__.py +0 -0
  49. context_use-0.1.0/context_use/ext/adk/agent/__init__.py +0 -0
  50. context_use-0.1.0/context_use/ext/adk/agent/agent.py +66 -0
  51. context_use-0.1.0/context_use/ext/adk/agent/runner.py +94 -0
  52. context_use-0.1.0/context_use/ext/mcp_use/__init__.py +1 -0
  53. context_use-0.1.0/context_use/ext/mcp_use/run.py +53 -0
  54. context_use-0.1.0/context_use/ext/mcp_use/server.py +49 -0
  55. context_use-0.1.0/context_use/facade/__init__.py +14 -0
  56. context_use-0.1.0/context_use/facade/core.py +437 -0
  57. context_use-0.1.0/context_use/facade/types.py +26 -0
  58. context_use-0.1.0/context_use/llm/__init__.py +24 -0
  59. context_use-0.1.0/context_use/llm/base.py +84 -0
  60. context_use-0.1.0/context_use/llm/litellm.py +464 -0
  61. context_use-0.1.0/context_use/llm/models.py +18 -0
  62. context_use-0.1.0/context_use/memories/__init__.py +12 -0
  63. context_use-0.1.0/context_use/memories/config.py +27 -0
  64. context_use-0.1.0/context_use/memories/embedding.py +56 -0
  65. context_use-0.1.0/context_use/memories/extractor.py +34 -0
  66. context_use-0.1.0/context_use/memories/factory.py +10 -0
  67. context_use-0.1.0/context_use/memories/manager.py +180 -0
  68. context_use-0.1.0/context_use/memories/prompt/__init__.py +15 -0
  69. context_use-0.1.0/context_use/memories/prompt/base.py +98 -0
  70. context_use-0.1.0/context_use/memories/prompt/conversation.py +168 -0
  71. context_use-0.1.0/context_use/memories/prompt/media.py +173 -0
  72. context_use-0.1.0/context_use/memories/states.py +104 -0
  73. context_use-0.1.0/context_use/models/__init__.py +32 -0
  74. context_use-0.1.0/context_use/models/archive.py +30 -0
  75. context_use-0.1.0/context_use/models/batch.py +70 -0
  76. context_use-0.1.0/context_use/models/etl_task.py +44 -0
  77. context_use-0.1.0/context_use/models/memory.py +47 -0
  78. context_use-0.1.0/context_use/models/thread.py +63 -0
  79. context_use-0.1.0/context_use/models/utils.py +5 -0
  80. context_use-0.1.0/context_use/prompt_categories.py +84 -0
  81. context_use-0.1.0/context_use/providers/__init__.py +22 -0
  82. context_use-0.1.0/context_use/providers/chatgpt/__init__.py +7 -0
  83. context_use-0.1.0/context_use/providers/chatgpt/conversations.py +187 -0
  84. context_use-0.1.0/context_use/providers/chatgpt/schemas.py +35 -0
  85. context_use-0.1.0/context_use/providers/claude/__init__.py +7 -0
  86. context_use-0.1.0/context_use/providers/claude/conversations.py +168 -0
  87. context_use-0.1.0/context_use/providers/claude/schemas.py +20 -0
  88. context_use-0.1.0/context_use/providers/google/__init__.py +10 -0
  89. context_use-0.1.0/context_use/providers/google/base.py +112 -0
  90. context_use-0.1.0/context_use/providers/google/discover.py +21 -0
  91. context_use-0.1.0/context_use/providers/google/lens.py +69 -0
  92. context_use-0.1.0/context_use/providers/google/schemas.py +32 -0
  93. context_use-0.1.0/context_use/providers/google/search.py +88 -0
  94. context_use-0.1.0/context_use/providers/google/shopping.py +20 -0
  95. context_use-0.1.0/context_use/providers/google/youtube.py +161 -0
  96. context_use-0.1.0/context_use/providers/instagram/__init__.py +28 -0
  97. context_use-0.1.0/context_use/providers/instagram/comments.py +137 -0
  98. context_use-0.1.0/context_use/providers/instagram/connections.py +156 -0
  99. context_use-0.1.0/context_use/providers/instagram/likes.py +280 -0
  100. context_use-0.1.0/context_use/providers/instagram/media.py +166 -0
  101. context_use-0.1.0/context_use/providers/instagram/posts_viewed.py +179 -0
  102. context_use-0.1.0/context_use/providers/instagram/profile_searches.py +101 -0
  103. context_use-0.1.0/context_use/providers/instagram/saved.py +233 -0
  104. context_use-0.1.0/context_use/providers/instagram/schemas.py +329 -0
  105. context_use-0.1.0/context_use/providers/instagram/videos_watched.py +145 -0
  106. context_use-0.1.0/context_use/providers/registry.py +95 -0
  107. context_use-0.1.0/context_use/providers/types.py +90 -0
  108. context_use-0.1.0/context_use/storage/__init__.py +7 -0
  109. context_use-0.1.0/context_use/storage/base.py +43 -0
  110. context_use-0.1.0/context_use/storage/disk.py +52 -0
  111. context_use-0.1.0/context_use/storage/gcs.py +67 -0
  112. context_use-0.1.0/context_use/store/__init__.py +8 -0
  113. context_use-0.1.0/context_use/store/base.py +215 -0
  114. context_use-0.1.0/context_use/store/memory.py +247 -0
  115. context_use-0.1.0/context_use/store/postgres/__init__.py +11 -0
  116. context_use-0.1.0/context_use/store/postgres/backend.py +100 -0
  117. context_use-0.1.0/context_use/store/postgres/orm/__init__.py +16 -0
  118. context_use-0.1.0/context_use/store/postgres/orm/base.py +32 -0
  119. context_use-0.1.0/context_use/store/postgres/orm/batch.py +127 -0
  120. context_use-0.1.0/context_use/store/postgres/orm/etl.py +106 -0
  121. context_use-0.1.0/context_use/store/postgres/orm/memory.py +58 -0
  122. context_use-0.1.0/context_use/store/postgres/store.py +516 -0
  123. context_use-0.1.0/context_use/testing/__init__.py +3 -0
  124. context_use-0.1.0/context_use/testing/pipe_test_kit.py +162 -0
  125. context_use-0.1.0/pyproject.toml +174 -0
@@ -0,0 +1,23 @@
1
+ # Python-generated files
2
+ __pycache__/
3
+ *.py[oc]
4
+ build/
5
+ dist/
6
+ wheels/
7
+ *.egg-info
8
+
9
+ # Virtual environments
10
+ .venv
11
+
12
+ data/**
13
+ !data/*/
14
+ !data/*/.gitkeep
15
+
16
+ *.db
17
+ *.zip
18
+
19
+ # Environment files
20
+ .env*
21
+ !.env*.example
22
+
23
+ .DS_Store
@@ -0,0 +1,125 @@
1
+ ## [0.1.0] - 2026-03-09
2
+
3
+ ### 🚀 Features
4
+
5
+ - Add Google ETL for Discover and Lens (#116)
6
+ - Centralize memory operations (#115)
7
+ - Add Google Shopping ETL (#114)
8
+ - Add ETL for Google Youtube (#113)
9
+ - Add Google search ETLs (#112)
10
+ - Claude conversations etl and memory pipelines (#111)
11
+ - Add FibreDislike (#109)
12
+ - Provider and interaction auto registration (#107)
13
+ - Remove ask command (#106)
14
+ - Pg as an optional dep (#104)
15
+ - Split cli commands (#101)
16
+ - Basic profile generation skill (#100)
17
+ - Remove one shot user profile generation (#99)
18
+ - Agent with memory synthesise skill (#98)
19
+ - Refinement agent (#96)
20
+ - Remove memory refinement pipeline (#95)
21
+ - Remove unused store methods (#94)
22
+ - [**breaking**] Generate memories from unprocessed threads (#93)
23
+ - Bump thread payload version to 1.1.0 (#91)
24
+ - Add IG ETL saved posts and collections (#90)
25
+ - Add IG ETL for likes, followers and comments (#89)
26
+ - Add IG ETL for posts_viewed and profile_searches (#88)
27
+ - Add fibre classes needed for instagram and google (#87)
28
+ - Add complete ActivityStreams schemas (#86)
29
+ - Add IG ETL for videos watched (#83)
30
+ - [**breaking**] Switch EtlTask to plural source_uris (#79)
31
+ - BatchContext to group configs (#78)
32
+ - Resolve asset uri for gcs storage (#77)
33
+ - Cli command to run e2e batch pipeline (#71)
34
+ - Config registry (#76)
35
+ - Base llm client and litellm implementation (#74)
36
+ - Cli quickstart and pg config (#68)
37
+ - Common categories for prompts (#67)
38
+ - Real time mode on subset of the data (#60)
39
+ - Use store facade (#57)
40
+ - Pg store (#56)
41
+ - In memory store (#55)
42
+ - Add abstract storage models (#54)
43
+ - Group ids instead of key (#53)
44
+ - Decouple batches and etls (#52)
45
+ - Add AGENTS.md doc for building ETL, symlink from CLAUDE and CONTRIBUTING mds (#42)
46
+ - Process chatgpt conversations (#44)
47
+ - Thread grouping (#43)
48
+ - Switch IG media ETL to pipe for ET and loader for L (#37)
49
+ - Switch gpt ETL to pipe for ET and loader for L (#36)
50
+ - Add foundation classes for Pipe ET pipeline and a separate Loader (#35)
51
+ - Mcp-use extension (#33)
52
+ - Support for latest openai models (#31)
53
+ - Enable pgvector extension (#27)
54
+ - Batch memory embeddings (#25)
55
+ - Litellm as llm client (#24)
56
+ - Gemini batch api implementation (#22)
57
+ - Dedup threads in upload strategy (#21)
58
+ - Simple batch memories generation from ig media (#18)
59
+ - Add etl_tasks.source_uri and archives.file_uris columns (#15)
60
+ - Only use postgres as db (#13)
61
+ - Basic etl from zip archives (#5)
62
+ - Initial repo setup
63
+
64
+ ### 🐛 Bug Fixes
65
+
66
+ - Report extraction failures from pipe (#117)
67
+ - Return full agent response (#103)
68
+ - IG ETL liked_posts v1/v0 split (#97)
69
+ - Allow IG profile_searches without username as value (#92)
70
+ - Make pipe resilient to parsing errors (#85)
71
+ - Fix videos_watches parsing (#84)
72
+ - Switch pipe versions to numeric (#81)
73
+ - Rm unused EtlTaskStatus fields (#80)
74
+ Process split chatgpt conversations (#72)
75
+ - Switch to legacy format for thread payload (#70)
76
+ - Simplify AGENTS.md, remove code quotations (#66)
77
+ - Use unique key suffix from payload as unique_key (#65)
78
+ - Archives concurrency (#51)
79
+ - Switch pipe archive path to glob pattern matching (#41)
80
+ - Absolute path resolution in storage backend (#28)
81
+ - Make asset_uri have the path relative to DiskStorage root (#14)
82
+
83
+ ### 💼 Other
84
+
85
+ - Downgrade asyncpg to 0.30.0 (#69)
86
+ - [**breaking**] Lower minimum python to 3.12 (#62)
87
+
88
+ ### 🚜 Refactor
89
+
90
+ - Agent module outside of memories (#108)
91
+ - Move configs out of cli (#102)
92
+ - Consolidate places where we generate a uuid (#63)
93
+ - Unify etl and memory configs (#45)
94
+ - Add pydantic-typed records between E and T (#23)
95
+ - [**breaking**] Async db (#29)
96
+ - [**breaking**] Async litellm (#26)
97
+ - Rm task descriptors, use EtlTask (#19)
98
+ - Rm TaskMetadata, replace with EtlTask (#17)
99
+ - Move session ownership out of etl (#16)
100
+ - Etl module (#10)
101
+
102
+ ### 🧪 Testing
103
+
104
+ - Refactor tests folder (#61)
105
+ - Add pipe test kit which provides generic test harness for new etl (#40)
106
+ - Add synthetic archives for ig and gpt data (#30)
107
+
108
+ ### ⚙️ Miscellaneous Tasks
109
+
110
+ - Only include necessary files in build (#126)
111
+ - Remove code-specific guides from contributing (#125)
112
+ - Publish release workflow (#124)
113
+ - Prepare release workflow (#119)
114
+ - Generate changelog using `git-cliff` (#118)
115
+ - Exit at first failure (#110)
116
+ - Update readme (#105)
117
+ - Rm stale ETL Loader classes (#73)
118
+ - [**breaking**] Drop pandas from dependencies (#39)
119
+ - Clean up dead ETL code (#38)
120
+ - Remove useless comments (#34)
121
+ - Typecheck tests (#20)
122
+ - Minor (#12)
123
+ - Docker compose local env (#11)
124
+ - Run pytest on every PR (#8)
125
+ - Pre-commit config (#9)
@@ -0,0 +1,176 @@
1
+ Metadata-Version: 2.4
2
+ Name: context-use
3
+ Version: 0.1.0
4
+ Requires-Python: >=3.12
5
+ Requires-Dist: ijson>=3.0
6
+ Requires-Dist: litellm>=1.81.13
7
+ Requires-Dist: pydantic>=2.0
8
+ Requires-Dist: tenacity>=9.1.4
9
+ Provides-Extra: adk
10
+ Requires-Dist: google-adk>=1.26; extra == 'adk'
11
+ Provides-Extra: gcs
12
+ Requires-Dist: google-cloud-storage<3,>=2.10.0; extra == 'gcs'
13
+ Provides-Extra: mcp-use
14
+ Requires-Dist: langchain-openai>=1.1.10; extra == 'mcp-use'
15
+ Requires-Dist: mcp-use>=1.6.0; extra == 'mcp-use'
16
+ Provides-Extra: postgres
17
+ Requires-Dist: asyncpg>=0.30.0; extra == 'postgres'
18
+ Requires-Dist: greenlet>=3.3.1; extra == 'postgres'
19
+ Requires-Dist: pgvector>=0.4.2; extra == 'postgres'
20
+ Requires-Dist: sqlalchemy>=2.0; extra == 'postgres'
21
+ Description-Content-Type: text/markdown
22
+
23
+ # context-use
24
+
25
+ Turn your data exports into portable AI memory.
26
+
27
+ ## Features
28
+
29
+ - **Ingest** — parse provider export ZIPs into structured threads; no cloud upload required
30
+ - **Quickstart** — zero-database preview mode; results written to `data/output/` with no setup beyond an OpenAI key
31
+ - **Full pipeline** — persistent storage in PostgreSQL with pgvector; full archive history, batch API for cost-efficient memory generation
32
+ - **Semantic search** — `memories search` queries your memory store by meaning, not just keywords
33
+ - **MCP server** — expose memories and semantic search to Claude Desktop, Cursor, or any MCP client
34
+ - **Personal agent** — multi-turn agent that synthesises higher-level pattern memories, generates a first-person profile, or runs ad-hoc queries against your memory store
35
+
36
+ ## Supported providers
37
+
38
+ | Provider | Status | Data types | Export guide |
39
+ |----------|--------|------------|-------------|
40
+ | ChatGPT | Available | Conversations | [Export your data](https://help.openai.com/en/articles/7260999-how-do-i-export-my-chatgpt-history-and-data) |
41
+ | Instagram | Available | Stories, Reels, Posts | [Download your data](https://help.instagram.com/181231772500920) |
42
+ | WhatsApp | Coming soon | | |
43
+ | Google Takeout | Coming soon | | |
44
+
45
+ ## Getting your export
46
+
47
+ 1. Follow the export guide for your provider in the table above. The export is delivered as a ZIP file — **do not extract it**.
48
+ 2. Move or copy the ZIP into `data/input/` inside the cloned repo:
49
+
50
+ ```
51
+ context-use/
52
+ └── data/
53
+ └── input/
54
+ └── chatgpt-export.zip ← place it here
55
+ ```
56
+
57
+ Both `quickstart` and `pipeline` scan `data/input/` for exports on startup and prompt you to pick one if multiple are present.
58
+
59
+ ## Install
60
+
61
+ ```bash
62
+ git clone https://github.com/onfabric/context-use.git
63
+ cd context-use
64
+ uv sync
65
+ source .venv/bin/activate
66
+ ```
67
+
68
+ Set your OpenAI API key:
69
+
70
+ ```bash
71
+ context-use config set-key
72
+ # or: export OPENAI_API_KEY=sk-...
73
+ ```
74
+
75
+ ## Quick start
76
+
77
+ A zero-setup preview that requires no database.
78
+
79
+ ```bash
80
+ context-use quickstart
81
+ ```
82
+
83
+ The CLI prompts for the export and provider. Memory generation uses the OpenAI **real-time API** — fast for small slices but susceptible to rate limits on large exports. By default only the last 30 days are processed; use `--full` to include the complete history (the CLI warns you before proceeding).
84
+
85
+ The output is a snapshot: memories are written to `data/output/` as Markdown and JSON, then discarded. Nothing is stored in a database, so the memories are not queryable, searchable, or available to the MCP server after the command exits.
86
+
87
+ **The full pipeline is the intended way to use context-use beyond this initial preview.**
88
+
89
+ ## Full pipeline
90
+
91
+ For persistent storage, semantic search, and the MCP server.
92
+
93
+ **1. Set up PostgreSQL (one-time)**
94
+
95
+ ```bash
96
+ context-use config set-store postgres
97
+ ```
98
+
99
+ Prompts to start a local container via Docker, then saves connection details to `~/.config/context-use/config.toml`. Skip Docker if you're bringing your own PostgreSQL instance.
100
+
101
+ **2. Run the pipeline**
102
+
103
+ ```bash
104
+ context-use pipeline
105
+ ```
106
+
107
+ Ingests the export and generates memories via the OpenAI **batch API** — significantly cheaper and more rate-limit-friendly than the real-time API used by quickstart. Typical runtime: 2–10 minutes. Memories are stored in PostgreSQL and persist across sessions, enabling semantic search, the MCP server, and the personal agent.
108
+
109
+ **3. Explore your memories**
110
+
111
+ ```bash
112
+ context-use memories list
113
+ context-use memories search "hiking trips in 2024"
114
+ ```
115
+
116
+ ## MCP server
117
+
118
+ Requires the full pipeline (PostgreSQL).
119
+
120
+ ```bash
121
+ python -m context_use.ext.mcp_use.run
122
+ # use --transport stdio for clients that prefer stdio
123
+ ```
124
+
125
+ Add to your MCP client config (Claude Desktop, Cursor, etc.):
126
+
127
+ ```json
128
+ {
129
+ "mcpServers": {
130
+ "context-use": {
131
+ "command": "python",
132
+ "args": ["-m", "context_use.ext.mcp_use.run", "--transport", "stdio"]
133
+ }
134
+ }
135
+ }
136
+ ```
137
+
138
+ Claude Desktop config path: `~/Library/Application Support/Claude/claude_desktop_config.json`. Cursor: Settings → MCP.
139
+
140
+ ## Personal agent
141
+
142
+ A multi-turn agent that operates over your full memory store. Requires PostgreSQL.
143
+
144
+ ```bash
145
+ context-use config set-agent adk
146
+ context-use agent synthesise # generate higher-level pattern memories
147
+ context-use agent profile # compile a first-person profile
148
+ context-use agent ask "What topics do I keep coming back to across all my conversations?"
149
+ ```
150
+
151
+ ## Configuration
152
+
153
+ Config file: `~/.config/context-use/config.toml`. Run `context-use config show` to see all active values and where each comes from (env var, file, or built-in default).
154
+
155
+ | Setting | CLI command | Env var | Default |
156
+ |---------|-------------|---------|---------|
157
+ | OpenAI API key | `config set-key` | `OPENAI_API_KEY` | — |
158
+ | Model | edit config file | `OPENAI_MODEL` | `gpt-5.2` |
159
+ | Embedding model | edit config file | `OPENAI_EMBEDDING_MODEL` | `text-embedding-3-large` |
160
+ | Store backend | `config set-store postgres\|memory` | `CONTEXT_USE_STORE` | `memory` |
161
+ | PostgreSQL host | `config set-store postgres` | `POSTGRES_HOST` | `localhost` |
162
+ | PostgreSQL port | `config set-store postgres` | `POSTGRES_PORT` | `5432` |
163
+ | PostgreSQL database | `config set-store postgres` | `POSTGRES_DB` | `context_use` |
164
+ | PostgreSQL user | `config set-store postgres` | `POSTGRES_USER` | `postgres` |
165
+ | PostgreSQL password | `config set-store postgres` | `POSTGRES_PASSWORD` | `postgres` |
166
+ | Agent backend | `config set-agent adk` | `CONTEXT_USE_AGENT_BACKEND` | — |
167
+ | Data directory | edit config file | — | `./data` |
168
+
169
+
170
+ ## Adding new providers and pipes
171
+
172
+ See [AGENTS.md](AGENTS.md) for `context-use`'s architecture and how to add new providers and pipes.
173
+
174
+ ## Contributing
175
+
176
+ See [CONTRIBUTING.md](.github/CONTRIBUTING.md) for how to contribute to the `context-use` project.
@@ -0,0 +1,154 @@
1
+ # context-use
2
+
3
+ Turn your data exports into portable AI memory.
4
+
5
+ ## Features
6
+
7
+ - **Ingest** — parse provider export ZIPs into structured threads; no cloud upload required
8
+ - **Quickstart** — zero-database preview mode; results written to `data/output/` with no setup beyond an OpenAI key
9
+ - **Full pipeline** — persistent storage in PostgreSQL with pgvector; full archive history, batch API for cost-efficient memory generation
10
+ - **Semantic search** — `memories search` queries your memory store by meaning, not just keywords
11
+ - **MCP server** — expose memories and semantic search to Claude Desktop, Cursor, or any MCP client
12
+ - **Personal agent** — multi-turn agent that synthesises higher-level pattern memories, generates a first-person profile, or runs ad-hoc queries against your memory store
13
+
14
+ ## Supported providers
15
+
16
+ | Provider | Status | Data types | Export guide |
17
+ |----------|--------|------------|-------------|
18
+ | ChatGPT | Available | Conversations | [Export your data](https://help.openai.com/en/articles/7260999-how-do-i-export-my-chatgpt-history-and-data) |
19
+ | Instagram | Available | Stories, Reels, Posts | [Download your data](https://help.instagram.com/181231772500920) |
20
+ | WhatsApp | Coming soon | | |
21
+ | Google Takeout | Coming soon | | |
22
+
23
+ ## Getting your export
24
+
25
+ 1. Follow the export guide for your provider in the table above. The export is delivered as a ZIP file — **do not extract it**.
26
+ 2. Move or copy the ZIP into `data/input/` inside the cloned repo:
27
+
28
+ ```
29
+ context-use/
30
+ └── data/
31
+ └── input/
32
+ └── chatgpt-export.zip ← place it here
33
+ ```
34
+
35
+ Both `quickstart` and `pipeline` scan `data/input/` for exports on startup and prompt you to pick one if multiple are present.
36
+
37
+ ## Install
38
+
39
+ ```bash
40
+ git clone https://github.com/onfabric/context-use.git
41
+ cd context-use
42
+ uv sync
43
+ source .venv/bin/activate
44
+ ```
45
+
46
+ Set your OpenAI API key:
47
+
48
+ ```bash
49
+ context-use config set-key
50
+ # or: export OPENAI_API_KEY=sk-...
51
+ ```
52
+
53
+ ## Quick start
54
+
55
+ A zero-setup preview that requires no database.
56
+
57
+ ```bash
58
+ context-use quickstart
59
+ ```
60
+
61
+ The CLI prompts for the export and provider. Memory generation uses the OpenAI **real-time API** — fast for small slices but susceptible to rate limits on large exports. By default only the last 30 days are processed; use `--full` to include the complete history (the CLI warns you before proceeding).
62
+
63
+ The output is a snapshot: memories are written to `data/output/` as Markdown and JSON, then discarded. Nothing is stored in a database, so the memories are not queryable, searchable, or available to the MCP server after the command exits.
64
+
65
+ **The full pipeline is the intended way to use context-use beyond this initial preview.**
66
+
67
+ ## Full pipeline
68
+
69
+ For persistent storage, semantic search, and the MCP server.
70
+
71
+ **1. Set up PostgreSQL (one-time)**
72
+
73
+ ```bash
74
+ context-use config set-store postgres
75
+ ```
76
+
77
+ Prompts to start a local container via Docker, then saves connection details to `~/.config/context-use/config.toml`. Skip Docker if you're bringing your own PostgreSQL instance.
78
+
79
+ **2. Run the pipeline**
80
+
81
+ ```bash
82
+ context-use pipeline
83
+ ```
84
+
85
+ Ingests the export and generates memories via the OpenAI **batch API** — significantly cheaper and more rate-limit-friendly than the real-time API used by quickstart. Typical runtime: 2–10 minutes. Memories are stored in PostgreSQL and persist across sessions, enabling semantic search, the MCP server, and the personal agent.
86
+
87
+ **3. Explore your memories**
88
+
89
+ ```bash
90
+ context-use memories list
91
+ context-use memories search "hiking trips in 2024"
92
+ ```
93
+
94
+ ## MCP server
95
+
96
+ Requires the full pipeline (PostgreSQL).
97
+
98
+ ```bash
99
+ python -m context_use.ext.mcp_use.run
100
+ # use --transport stdio for clients that prefer stdio
101
+ ```
102
+
103
+ Add to your MCP client config (Claude Desktop, Cursor, etc.):
104
+
105
+ ```json
106
+ {
107
+ "mcpServers": {
108
+ "context-use": {
109
+ "command": "python",
110
+ "args": ["-m", "context_use.ext.mcp_use.run", "--transport", "stdio"]
111
+ }
112
+ }
113
+ }
114
+ ```
115
+
116
+ Claude Desktop config path: `~/Library/Application Support/Claude/claude_desktop_config.json`. Cursor: Settings → MCP.
117
+
118
+ ## Personal agent
119
+
120
+ A multi-turn agent that operates over your full memory store. Requires PostgreSQL.
121
+
122
+ ```bash
123
+ context-use config set-agent adk
124
+ context-use agent synthesise # generate higher-level pattern memories
125
+ context-use agent profile # compile a first-person profile
126
+ context-use agent ask "What topics do I keep coming back to across all my conversations?"
127
+ ```
128
+
129
+ ## Configuration
130
+
131
+ Config file: `~/.config/context-use/config.toml`. Run `context-use config show` to see all active values and where each comes from (env var, file, or built-in default).
132
+
133
+ | Setting | CLI command | Env var | Default |
134
+ |---------|-------------|---------|---------|
135
+ | OpenAI API key | `config set-key` | `OPENAI_API_KEY` | — |
136
+ | Model | edit config file | `OPENAI_MODEL` | `gpt-5.2` |
137
+ | Embedding model | edit config file | `OPENAI_EMBEDDING_MODEL` | `text-embedding-3-large` |
138
+ | Store backend | `config set-store postgres\|memory` | `CONTEXT_USE_STORE` | `memory` |
139
+ | PostgreSQL host | `config set-store postgres` | `POSTGRES_HOST` | `localhost` |
140
+ | PostgreSQL port | `config set-store postgres` | `POSTGRES_PORT` | `5432` |
141
+ | PostgreSQL database | `config set-store postgres` | `POSTGRES_DB` | `context_use` |
142
+ | PostgreSQL user | `config set-store postgres` | `POSTGRES_USER` | `postgres` |
143
+ | PostgreSQL password | `config set-store postgres` | `POSTGRES_PASSWORD` | `postgres` |
144
+ | Agent backend | `config set-agent adk` | `CONTEXT_USE_AGENT_BACKEND` | — |
145
+ | Data directory | edit config file | — | `./data` |
146
+
147
+
148
+ ## Adding new providers and pipes
149
+
150
+ See [AGENTS.md](AGENTS.md) for `context-use`'s architecture and how to add new providers and pipes.
151
+
152
+ ## Contributing
153
+
154
+ See [CONTRIBUTING.md](.github/CONTRIBUTING.md) for how to contribute to the `context-use` project.
@@ -0,0 +1,24 @@
1
+ """Public API for the context_use library.
2
+
3
+ External consumers (CLI, MCP server, etc.) should import exclusively
4
+ from this module. Only unit / integration tests may reach into
5
+ sub-packages directly.
6
+ """
7
+
8
+ import context_use.providers # noqa: F401 — triggers provider registration
9
+ from context_use.facade import (
10
+ ContextUse,
11
+ PipelineResult,
12
+ ScheduleInstruction,
13
+ TaskBreakdown,
14
+ )
15
+ from context_use.store import InMemoryStore, Store
16
+
17
+ __all__ = [
18
+ "ContextUse",
19
+ "InMemoryStore",
20
+ "PipelineResult",
21
+ "ScheduleInstruction",
22
+ "Store",
23
+ "TaskBreakdown",
24
+ ]
@@ -0,0 +1,170 @@
1
+ """
2
+ ActivityStreams 2.0 Pydantic Models
3
+
4
+ This package provides comprehensive Pydantic models for all ActivityStreams 2.0 types
5
+ as defined in the W3C ActivityStreams 2.0 Vocabulary specification.
6
+
7
+ The models are strictly compliant with the W3C specification and include:
8
+ - Core Types: Object, Link, Activity, Collection, etc.
9
+ - Activity Types: Accept, Add, Announce, Create, etc.
10
+ - Actor Types: Person, Organization, Service, etc.
11
+ - Object Types: Article, Note, Image, Video, etc.
12
+ - Link Types: Link, Mention, and relationship types
13
+
14
+ Usage:
15
+ from context_use.activitystreams import Note, Create, Person
16
+
17
+ # Create a note
18
+ note = Note(
19
+ type="Note",
20
+ content="Hello, ActivityStreams!",
21
+ attributedTo="https://example.com/users/alice"
22
+ )
23
+
24
+ # Create an activity
25
+ activity = Create(
26
+ type="Create",
27
+ actor="https://example.com/users/alice",
28
+ object=note
29
+ )
30
+ """
31
+
32
+ # Core types
33
+ # Activity types
34
+ from .activities import (
35
+ Accept,
36
+ Add,
37
+ Announce,
38
+ Arrive,
39
+ Block,
40
+ Create,
41
+ Delete,
42
+ Dislike,
43
+ Flag,
44
+ Follow,
45
+ Ignore,
46
+ Invite,
47
+ Join,
48
+ Leave,
49
+ Like,
50
+ Listen,
51
+ Move,
52
+ Offer,
53
+ Question,
54
+ Read,
55
+ Reject,
56
+ Remove,
57
+ TentativeAccept,
58
+ TentativeReject,
59
+ Travel,
60
+ Undo,
61
+ Update,
62
+ View,
63
+ )
64
+
65
+ # Actor types
66
+ from .actors import (
67
+ Actor,
68
+ Application,
69
+ Group,
70
+ Organization,
71
+ Person,
72
+ Service,
73
+ )
74
+ from .core import (
75
+ Activity,
76
+ ASType,
77
+ Collection,
78
+ CollectionPage,
79
+ IntransitiveActivity,
80
+ Link,
81
+ Object,
82
+ OrderedCollection,
83
+ OrderedCollectionPage,
84
+ )
85
+
86
+ # Link types and relationship types
87
+ from .links import (
88
+ Mention,
89
+ )
90
+
91
+ # Object types
92
+ from .objects import (
93
+ Article,
94
+ Audio,
95
+ Document,
96
+ Event,
97
+ Image,
98
+ Note,
99
+ Page,
100
+ Place,
101
+ Profile,
102
+ Relationship,
103
+ Tombstone,
104
+ Video,
105
+ )
106
+
107
+ # All exports organized by category
108
+ __all__ = [
109
+ # Core types
110
+ "ASType",
111
+ "Object",
112
+ "Link",
113
+ "Activity",
114
+ "IntransitiveActivity",
115
+ "Collection",
116
+ "OrderedCollection",
117
+ "CollectionPage",
118
+ "OrderedCollectionPage",
119
+ # Activity types
120
+ "Accept",
121
+ "TentativeAccept",
122
+ "Add",
123
+ "Announce",
124
+ "Arrive",
125
+ "Block",
126
+ "Create",
127
+ "Delete",
128
+ "Dislike",
129
+ "Flag",
130
+ "Follow",
131
+ "Ignore",
132
+ "Invite",
133
+ "Join",
134
+ "Leave",
135
+ "Like",
136
+ "Listen",
137
+ "Move",
138
+ "Offer",
139
+ "Question",
140
+ "Reject",
141
+ "TentativeReject",
142
+ "Read",
143
+ "Remove",
144
+ "Travel",
145
+ "Undo",
146
+ "Update",
147
+ "View",
148
+ # Actor types
149
+ "Actor",
150
+ "Application",
151
+ "Group",
152
+ "Organization",
153
+ "Person",
154
+ "Service",
155
+ # Object types
156
+ "Article",
157
+ "Audio",
158
+ "Document",
159
+ "Event",
160
+ "Image",
161
+ "Note",
162
+ "Page",
163
+ "Place",
164
+ "Profile",
165
+ "Relationship",
166
+ "Tombstone",
167
+ "Video",
168
+ # Link types and relationship types
169
+ "Mention",
170
+ ]