codebase-cortex 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- codebase_cortex/__init__.py +3 -0
- codebase_cortex/agents/__init__.py +0 -0
- codebase_cortex/agents/base.py +69 -0
- codebase_cortex/agents/code_analyzer.py +122 -0
- codebase_cortex/agents/doc_writer.py +356 -0
- codebase_cortex/agents/semantic_finder.py +64 -0
- codebase_cortex/agents/sprint_reporter.py +152 -0
- codebase_cortex/agents/task_creator.py +138 -0
- codebase_cortex/auth/__init__.py +0 -0
- codebase_cortex/auth/callback_server.py +80 -0
- codebase_cortex/auth/oauth.py +173 -0
- codebase_cortex/auth/token_store.py +90 -0
- codebase_cortex/cli.py +855 -0
- codebase_cortex/config.py +150 -0
- codebase_cortex/embeddings/__init__.py +0 -0
- codebase_cortex/embeddings/clustering.py +140 -0
- codebase_cortex/embeddings/indexer.py +208 -0
- codebase_cortex/embeddings/store.py +126 -0
- codebase_cortex/git/__init__.py +0 -0
- codebase_cortex/git/diff_parser.py +185 -0
- codebase_cortex/git/github_client.py +46 -0
- codebase_cortex/graph.py +111 -0
- codebase_cortex/mcp_client.py +94 -0
- codebase_cortex/notion/__init__.py +0 -0
- codebase_cortex/notion/bootstrap.py +298 -0
- codebase_cortex/notion/page_cache.py +107 -0
- codebase_cortex/state.py +77 -0
- codebase_cortex/utils/__init__.py +0 -0
- codebase_cortex/utils/json_parsing.py +59 -0
- codebase_cortex/utils/logging.py +62 -0
- codebase_cortex/utils/rate_limiter.py +56 -0
- codebase_cortex/utils/section_parser.py +139 -0
- codebase_cortex-0.1.0.dist-info/METADATA +209 -0
- codebase_cortex-0.1.0.dist-info/RECORD +37 -0
- codebase_cortex-0.1.0.dist-info/WHEEL +4 -0
- codebase_cortex-0.1.0.dist-info/entry_points.txt +3 -0
- codebase_cortex-0.1.0.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,152 @@
|
|
|
1
|
+
"""SprintReporter agent — generates weekly sprint summaries in Notion."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from datetime import datetime, timedelta
|
|
6
|
+
|
|
7
|
+
from langchain_core.messages import HumanMessage, SystemMessage
|
|
8
|
+
|
|
9
|
+
from codebase_cortex.agents.base import BaseAgent
|
|
10
|
+
from codebase_cortex.notion.bootstrap import extract_page_id
|
|
11
|
+
from codebase_cortex.state import CortexState
|
|
12
|
+
|
|
13
|
+
SYSTEM_PROMPT = """You are a technical project manager writing a sprint summary.
|
|
14
|
+
Given code analysis data and documentation updates, write a concise weekly sprint report.
|
|
15
|
+
|
|
16
|
+
Structure the report with these sections:
|
|
17
|
+
1. **Sprint Overview** - One paragraph summary
|
|
18
|
+
2. **Key Changes** - Bullet list of significant changes
|
|
19
|
+
3. **Documentation Updates** - What docs were created/updated
|
|
20
|
+
4. **Open Tasks** - Tasks created for documentation gaps
|
|
21
|
+
5. **Metrics** - Files changed, docs updated, tasks created
|
|
22
|
+
|
|
23
|
+
Keep it professional and concise. Use markdown formatting."""
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
class SprintReporterAgent(BaseAgent):
    """Generates sprint summary reports in Notion."""

    async def run(self, state: CortexState) -> dict:
        """Generate the weekly sprint summary from accumulated state.

        Args:
            state: Shared state; reads "analysis", "changed_files",
                "doc_updates", "tasks_created", and "dry_run".

        Returns:
            dict with "sprint_summary" (empty when there is no analysis or
            the LLM call fails) and, on failure, an appended "errors" list.
        """
        analysis = state.get("analysis", "")
        # No upstream analysis means there is nothing to summarize.
        if not analysis:
            return {"sprint_summary": ""}

        changed_files = state.get("changed_files", [])
        doc_updates = state.get("doc_updates", [])
        tasks_created = state.get("tasks_created", [])
        dry_run = state.get("dry_run", False)

        now = datetime.now()
        # Monday of the current week (weekday() is 0 on Monday).
        week_start = now - timedelta(days=now.weekday())

        prompt = f"""Generate a sprint summary for the week of {week_start.strftime('%B %d, %Y')}.

## Code Analysis
{analysis}

## Changes
- {len(changed_files)} files changed
- {sum(f.get('additions', 0) for f in changed_files)} lines added
- {sum(f.get('deletions', 0) for f in changed_files)} lines deleted

## Documentation Updates
{self._format_doc_updates(doc_updates)}

## Tasks Created
{self._format_tasks(tasks_created)}

Write a complete sprint report in markdown."""

        try:
            messages = [
                SystemMessage(content=SYSTEM_PROMPT),
                HumanMessage(content=prompt),
            ]
            sprint_summary = await self._invoke_llm(messages)
        except Exception as e:
            # LLM failure is non-fatal: record the error and return an
            # empty summary so the rest of the pipeline can continue.
            return {
                "sprint_summary": "",
                "errors": self._append_error(state, f"Sprint report failed: {e}"),
            }

        # Write to Notion Sprint Log page
        if not dry_run and sprint_summary:
            await self._write_to_notion(sprint_summary, week_start, state)

        return {"sprint_summary": sprint_summary}

    async def _write_to_notion(
        self, summary: str, week_start: datetime, state: CortexState
    ) -> None:
        """Append sprint summary to the Sprint Log page in Notion."""
        # Function-scope imports — presumably to avoid import cycles or
        # startup cost; confirm before hoisting to module level.
        from codebase_cortex.mcp_client import notion_mcp_session, rate_limiter
        from codebase_cortex.config import Settings
        from codebase_cortex.notion.page_cache import PageCache
        from codebase_cortex.utils.logging import get_logger

        logger = get_logger()
        settings = Settings.from_env()
        cache = PageCache(cache_path=settings.page_cache_path)

        # Prefer the dedicated "Sprint Log" page; fall back to the workspace
        # root page when it is absent from the local cache.
        sprint_page = cache.find_by_title("Sprint Log")
        parent_page = cache.find_by_title("Codebase Cortex")
        parent_id = (sprint_page or parent_page)
        parent_id = parent_id.page_id if parent_id else None

        try:
            async with notion_mcp_session(settings) as session:
                await rate_limiter.acquire()

                week_label = week_start.strftime("%B %d, %Y")
                content = f"# Sprint Report — Week of {week_label}\n\n{summary}"

                if sprint_page:
                    # Append to existing Sprint Log using insert_content_after
                    # NOTE(review): assumes the page contains an
                    # "*Auto-gen...by Codebase Cortex*" footer for the
                    # ellipsis selector to anchor on — confirm against the
                    # bootstrap template, otherwise this call will miss.
                    await session.call_tool(
                        "notion-update-page",
                        arguments={
                            "page_id": sprint_page.page_id,
                            "command": "insert_content_after",
                            "selection_with_ellipsis": "---\n*Auto-gen...by Codebase Cortex*",
                            "new_str": f"\n\n---\n\n{content}",
                        },
                    )
                    logger.info(f"Appended to Sprint Log for week of {week_label}")
                else:
                    # Create new sprint report page
                    create_args: dict = {
                        "pages": [
                            {
                                "properties": {"title": f"📋 Sprint Report — {week_label}"},
                                "content": content,
                            }
                        ],
                    }
                    if parent_id:
                        create_args["parent"] = {"page_id": parent_id}

                    result = await session.call_tool(
                        "notion-create-pages",
                        arguments=create_args,
                    )
                    page_id = extract_page_id(result)
                    if page_id:
                        # Cache under "Sprint Log" so next week's run appends
                        # to this page instead of creating another one.
                        cache.upsert(page_id, "Sprint Log")
                    logger.info(f"Created Sprint Report for week of {week_label}")

        except Exception as e:
            # Best-effort write: log and continue rather than failing the run.
            logger.error(f"Failed to write sprint report: {e}")

    @staticmethod
    def _format_doc_updates(updates: list[dict]) -> str:
        """Render doc-update dicts as a markdown bullet list (or a placeholder)."""
        if not updates:
            return "No documentation updates this sprint."
        return "\n".join(f"- {u.get('title', 'Untitled')} ({u.get('action', 'update')})" for u in updates)

    @staticmethod
    def _format_tasks(tasks: list[dict]) -> str:
        """Render task dicts as '- [priority] title' bullets (or a placeholder)."""
        if not tasks:
            return "No new tasks created."
        return "\n".join(
            f"- [{t.get('priority', 'medium')}] {t.get('title', 'Untitled')}" for t in tasks
        )
|
|
@@ -0,0 +1,138 @@
|
|
|
1
|
+
"""TaskCreator agent — creates Notion tasks for undocumented areas."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from langchain_core.messages import HumanMessage, SystemMessage
|
|
6
|
+
|
|
7
|
+
from codebase_cortex.agents.base import BaseAgent
|
|
8
|
+
from codebase_cortex.notion.bootstrap import extract_page_id
|
|
9
|
+
from codebase_cortex.state import CortexState, TaskItem
|
|
10
|
+
from codebase_cortex.utils.json_parsing import parse_json_array
|
|
11
|
+
|
|
12
|
+
SYSTEM_PROMPT = """You are a documentation quality analyst. Given a code analysis,
|
|
13
|
+
identify areas that need documentation but don't have it yet.
|
|
14
|
+
|
|
15
|
+
Output a JSON array of tasks, each with:
|
|
16
|
+
- "title": Brief task title (imperative form, e.g., "Document the auth flow")
|
|
17
|
+
- "description": What needs to be documented and why
|
|
18
|
+
- "priority": "high" (breaking changes, new APIs), "medium" (new features), or "low" (refactors, minor changes)
|
|
19
|
+
|
|
20
|
+
Only create tasks for genuinely undocumented or under-documented areas.
|
|
21
|
+
Don't create tasks for trivial changes (formatting, typos, minor refactors).
|
|
22
|
+
Respond with ONLY the JSON array. If nothing needs documenting, return []."""
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class TaskCreatorAgent(BaseAgent):
    """Creates tasks in Notion for areas needing documentation."""

    async def run(self, state: CortexState) -> dict:
        """Identify documentation gaps via the LLM and create Notion tasks.

        Args:
            state: Shared state; reads "analysis", "doc_updates", "dry_run".

        Returns:
            dict with "tasks_created" (list of TaskItem, possibly empty) and,
            on LLM/parse failure, an appended "errors" list.
        """
        analysis = state.get("analysis", "")
        # Without an analysis there is nothing to derive tasks from.
        if not analysis:
            return {"tasks_created": []}

        doc_updates = state.get("doc_updates", [])
        dry_run = state.get("dry_run", False)

        # Build context about what was already documented
        already_documented = ""
        if doc_updates:
            already_documented = "\n\n## Already Documented\nThe following pages were just updated:\n"
            for d in doc_updates:
                # NOTE(review): direct indexing raises KeyError if an update
                # dict lacks "title"/"action"; the sibling SprintReporterAgent
                # uses .get() with defaults for the same dicts — confirm the
                # doc_updates schema always carries both keys.
                already_documented += f"- {d['title']} ({d['action']})\n"

        prompt = f"""Review this code analysis and identify documentation gaps.

## Code Analysis
{analysis}
{already_documented}

Create tasks only for areas NOT already covered by the doc updates above.
Respond with a JSON array of tasks (title, description, priority). Return [] if all areas are covered."""

        try:
            messages = [
                SystemMessage(content=SYSTEM_PROMPT),
                HumanMessage(content=prompt),
            ]
            raw = await self._invoke_llm(messages)

            # parse_json_array failures are caught below together with LLM
            # errors: either way we record the error and create no tasks.
            tasks_data = parse_json_array(raw)

        except Exception as e:
            return {
                "tasks_created": [],
                "errors": self._append_error(state, f"Task creation failed: {e}"),
            }

        # Normalize the loosely-typed LLM output into TaskItem records,
        # supplying defaults for any missing fields.
        tasks: list[TaskItem] = [
            TaskItem(
                title=t.get("title", "Untitled task"),
                description=t.get("description", ""),
                priority=t.get("priority", "medium"),
            )
            for t in tasks_data
        ]

        # Create tasks in Notion (unless dry_run)
        if not dry_run and tasks:
            await self._create_in_notion(tasks, state)

        return {"tasks_created": tasks}

    async def _create_in_notion(self, tasks: list[TaskItem], state: CortexState) -> None:
        """Create task items as child pages under the Task Board in Notion."""
        # Function-scope imports — presumably to avoid import cycles or
        # startup cost; confirm before hoisting to module level.
        from codebase_cortex.mcp_client import notion_mcp_session, rate_limiter
        from codebase_cortex.config import Settings
        from codebase_cortex.notion.page_cache import PageCache
        from codebase_cortex.utils.logging import get_logger

        logger = get_logger()
        settings = Settings.from_env()
        cache = PageCache(cache_path=settings.page_cache_path)

        # Prefer the "Task Board" page; fall back to the workspace root page.
        task_board = cache.find_by_title("Task Board")
        parent_page = cache.find_by_title("Codebase Cortex")
        parent_id = (task_board or parent_page)
        parent_id = parent_id.page_id if parent_id else None

        try:
            async with notion_mcp_session(settings) as session:
                # One page per task, rate-limited per call.
                for task in tasks:
                    await rate_limiter.acquire()

                    # Map priority to a colored-dot icon; unknown values get ⚪.
                    priority_icon = {"high": "🔴", "medium": "🟡", "low": "🟢"}.get(
                        task["priority"], "⚪"
                    )

                    title = f"{priority_icon} {task['title']}"
                    content = (
                        f"## {task['title']}\n\n"
                        f"**Priority:** {task['priority']}\n\n"
                        f"{task['description']}\n\n"
                        f"---\n*Auto-created by Codebase Cortex*"
                    )

                    create_args: dict = {
                        "pages": [
                            {
                                "properties": {"title": title},
                                "content": content,
                            }
                        ],
                    }
                    if parent_id:
                        create_args["parent"] = {"page_id": parent_id}

                    result = await session.call_tool(
                        "notion-create-pages",
                        arguments=create_args,
                    )

                    page_id = extract_page_id(result)
                    if page_id:
                        # Cached under the raw title (no icon prefix).
                        cache.upsert(page_id, task["title"])

                    logger.info(f"Created task: {priority_icon} {task['title']}")

        except Exception as e:
            # Best-effort: a Notion failure aborts remaining tasks but does
            # not fail the run; tasks are still returned to the caller.
            logger.error(f"Failed to create tasks in Notion: {e}")
|
|
File without changes
|
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
"""Local HTTP callback server for OAuth authorization code receipt."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import asyncio
|
|
6
|
+
from http.server import HTTPServer, BaseHTTPRequestHandler
|
|
7
|
+
from urllib.parse import urlparse, parse_qs
|
|
8
|
+
from threading import Thread
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class _CallbackHandler(BaseHTTPRequestHandler):
    """Handles the OAuth callback and extracts the authorization code.

    Results are stashed on class attributes so the coroutine in
    wait_for_callback() can poll them from outside the server thread.
    """

    # Class-level result slots: written by do_GET, read by wait_for_callback.
    code: str | None = None
    state: str | None = None
    error: str | None = None

    def do_GET(self) -> None:
        query = parse_qs(urlparse(self.path).query)

        if "error" in query:
            _CallbackHandler.error = query["error"][0]
            self._respond("Authorization failed. You can close this tab.")
            return
        if "code" not in query:
            self._respond("Unexpected callback. You can close this tab.", status=400)
            return
        _CallbackHandler.code = query["code"][0]
        _CallbackHandler.state = query.get("state", [None])[0]
        self._respond("Authorization successful! You can close this tab.")

    def _respond(self, message: str, status: int = 200) -> None:
        # Minimal HTML page shown in the user's browser tab.
        self.send_response(status)
        self.send_header("Content-Type", "text/html")
        self.end_headers()
        self.wfile.write(f"<html><body><h2>{message}</h2></body></html>".encode())

    def log_message(self, format: str, *args: object) -> None:
        pass  # Suppress request logging
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
async def wait_for_callback(
    port: int = 9876,
    timeout: float = 120.0,
) -> tuple[str, str | None]:
    """Start a local HTTP server and wait for the OAuth callback.

    Args:
        port: Port to listen on.
        timeout: Maximum seconds to wait for the callback.

    Returns:
        Tuple of (authorization_code, state).

    Raises:
        TimeoutError: If no callback received within timeout.
        RuntimeError: If the authorization was denied or failed.
    """
    # Clear results from any previous attempt (they live on the class).
    _CallbackHandler.code = None
    _CallbackHandler.state = None
    _CallbackHandler.error = None

    server = HTTPServer(("localhost", port), _CallbackHandler)
    thread = Thread(target=server.serve_forever, daemon=True)
    thread.start()

    try:
        # Poll the handler's class attributes; the server runs in a daemon
        # thread so the event loop stays free while we wait.
        elapsed = 0.0
        interval = 0.5
        while elapsed < timeout:
            if _CallbackHandler.code is not None:
                return _CallbackHandler.code, _CallbackHandler.state
            if _CallbackHandler.error is not None:
                raise RuntimeError(f"OAuth error: {_CallbackHandler.error}")
            await asyncio.sleep(interval)
            elapsed += interval
        raise TimeoutError("OAuth callback timed out")
    finally:
        server.shutdown()
        # Bug fix: shutdown() only stops serve_forever(); server_close() is
        # required to actually close the listening socket. Without it the
        # port stays bound for the life of the process, so a retry on the
        # same port fails with "address already in use".
        server.server_close()
|
|
@@ -0,0 +1,173 @@
|
|
|
1
|
+
"""OAuth 2.0 + PKCE flow for Notion MCP authorization."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import base64
|
|
6
|
+
import hashlib
|
|
7
|
+
import secrets
|
|
8
|
+
import webbrowser
|
|
9
|
+
|
|
10
|
+
import httpx
|
|
11
|
+
|
|
12
|
+
# RFC 8414 discovery document for Notion's MCP OAuth server; fetched by
# fetch_oauth_metadata() to learn the live endpoint URLs.
NOTION_MCP_METADATA_URL = "https://mcp.notion.com/.well-known/oauth-authorization-server"

# Fallback URLs (prefer metadata-discovered endpoints)
NOTION_AUTH_URL = "https://mcp.notion.com/authorize"
NOTION_TOKEN_URL = "https://mcp.notion.com/token"
NOTION_CLIENT_REGISTRATION_URL = "https://mcp.notion.com/register"
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def generate_pkce_pair() -> tuple[str, str]:
    """Generate a PKCE code verifier and challenge (S256).

    The verifier is a high-entropy URL-safe random string; the challenge is
    its SHA-256 digest, base64url-encoded without padding (RFC 7636).

    Returns:
        Tuple of (code_verifier, code_challenge).
    """
    code_verifier = secrets.token_urlsafe(64)
    hashed = hashlib.sha256(code_verifier.encode("ascii"))
    code_challenge = base64.urlsafe_b64encode(hashed.digest()).rstrip(b"=").decode("ascii")
    return code_verifier, code_challenge
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
async def fetch_oauth_metadata() -> dict:
    """Fetch the OAuth authorization server metadata from Notion MCP."""
    async with httpx.AsyncClient() as http:
        response = await http.get(NOTION_MCP_METADATA_URL)
        response.raise_for_status()
        return response.json()
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
async def register_client(
    redirect_uri: str,
    registration_endpoint: str | None = None,
) -> dict:
    """Dynamically register an OAuth client with Notion MCP.

    Args:
        redirect_uri: The callback URI (e.g., http://localhost:9876/callback).
        registration_endpoint: Override registration endpoint URL.

    Returns:
        Client registration response with client_id and client_secret.
    """
    # RFC 7591 dynamic client registration payload.
    registration_payload = {
        "client_name": "Codebase Cortex",
        "redirect_uris": [redirect_uri],
        "grant_types": ["authorization_code", "refresh_token"],
        "response_types": ["code"],
        "token_endpoint_auth_method": "client_secret_post",
    }
    target = registration_endpoint or NOTION_CLIENT_REGISTRATION_URL
    async with httpx.AsyncClient() as http:
        response = await http.post(target, json=registration_payload)
        response.raise_for_status()
        return response.json()
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
def build_authorization_url(
    client_id: str,
    redirect_uri: str,
    code_challenge: str,
    state: str,
    authorization_endpoint: str | None = None,
) -> str:
    """Build the Notion OAuth authorization URL.

    Args:
        client_id: The registered client ID.
        redirect_uri: Callback URI.
        code_challenge: PKCE S256 challenge.
        state: Random state for CSRF protection.
        authorization_endpoint: Override auth endpoint URL.

    Returns:
        The full authorization URL to open in a browser.
    """
    base = authorization_endpoint or NOTION_AUTH_URL
    # QueryParams handles percent-encoding of every value.
    query = httpx.QueryParams(
        {
            "client_id": client_id,
            "redirect_uri": redirect_uri,
            "response_type": "code",
            "code_challenge": code_challenge,
            "code_challenge_method": "S256",
            "state": state,
            "owner": "user",
        }
    )
    return f"{base}?{query}"
|
|
99
|
+
|
|
100
|
+
|
|
101
|
+
async def exchange_code(
    code: str,
    client_id: str,
    client_secret: str,
    redirect_uri: str,
    code_verifier: str,
    token_endpoint: str | None = None,
) -> dict:
    """Exchange authorization code for access and refresh tokens.

    Args:
        code: The authorization code from the callback.
        client_id: Registered client ID.
        client_secret: Registered client secret.
        redirect_uri: The same redirect URI used in authorization.
        code_verifier: The PKCE code verifier.
        token_endpoint: Override token endpoint URL.

    Returns:
        Token response with access_token, refresh_token, expires_in.
    """
    # Form-encoded body per RFC 6749 §4.1.3, plus the PKCE verifier.
    form = {
        "grant_type": "authorization_code",
        "code": code,
        "redirect_uri": redirect_uri,
        "client_id": client_id,
        "client_secret": client_secret,
        "code_verifier": code_verifier,
    }
    target = token_endpoint or NOTION_TOKEN_URL
    async with httpx.AsyncClient() as http:
        response = await http.post(target, data=form)
        response.raise_for_status()
        return response.json()
|
|
137
|
+
|
|
138
|
+
|
|
139
|
+
async def refresh_access_token(
    refresh_token: str,
    client_id: str,
    client_secret: str,
    token_endpoint: str | None = None,
) -> dict:
    """Refresh an expired access token.

    Args:
        refresh_token: The refresh token.
        client_id: Registered client ID.
        client_secret: Registered client secret.
        token_endpoint: Override token endpoint URL.

    Returns:
        New token response with access_token and possibly new refresh_token.
    """
    # Form-encoded refresh grant per RFC 6749 §6.
    form = {
        "grant_type": "refresh_token",
        "refresh_token": refresh_token,
        "client_id": client_id,
        "client_secret": client_secret,
    }
    target = token_endpoint or NOTION_TOKEN_URL
    async with httpx.AsyncClient() as http:
        response = await http.post(target, data=form)
        response.raise_for_status()
        return response.json()
|
|
169
|
+
|
|
170
|
+
|
|
171
|
+
def open_browser(url: str) -> None:
    """Open the authorization URL in the user's browser."""
    # Fire-and-forget: the bool returned by webbrowser.open is ignored.
    webbrowser.open(url)
|
|
@@ -0,0 +1,90 @@
|
|
|
1
|
+
"""Persist and manage OAuth tokens for Notion MCP."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
import time
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
from dataclasses import dataclass
|
|
9
|
+
|
|
10
|
+
from codebase_cortex.auth.oauth import refresh_access_token
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
@dataclass
class TokenData:
    """Stored OAuth token data."""

    access_token: str       # current bearer token
    refresh_token: str      # used to obtain a new access token
    expires_at: float       # unix timestamp when access_token expires
    client_id: str          # registered OAuth client id
    client_secret: str      # registered OAuth client secret
    token_endpoint: str | None = None  # discovered token URL, if any

    @property
    def is_expired(self) -> bool:
        """True once the token is within 60 seconds of expiry."""
        remaining = self.expires_at - time.time()
        return remaining <= 60  # 60s safety buffer

    def to_dict(self) -> dict:
        """Serialize to a plain dict for JSON storage."""
        return dict(
            access_token=self.access_token,
            refresh_token=self.refresh_token,
            expires_at=self.expires_at,
            client_id=self.client_id,
            client_secret=self.client_secret,
            token_endpoint=self.token_endpoint,
        )

    @classmethod
    def from_dict(cls, data: dict) -> TokenData:
        """Rebuild a TokenData from a dict produced by to_dict()."""
        return cls(**data)
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
def save_tokens(token_data: TokenData, path: Path) -> None:
    """Save token data to a JSON file with owner-only permissions.

    Args:
        token_data: The tokens to persist (serialized via to_dict()).
        path: Destination file; parent directories are created as needed.
    """
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(json.dumps(token_data.to_dict(), indent=2))
    # Security fix: the file contains client_secret and refresh_token, so it
    # must not be world-readable. Restrict to owner read/write (no-op on
    # platforms without POSIX permissions).
    path.chmod(0o600)
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
def load_tokens(path: Path) -> TokenData | None:
    """Load token data from a JSON file, or None if not found."""
    try:
        raw = path.read_text()
    except FileNotFoundError:
        return None
    return TokenData.from_dict(json.loads(raw))
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
async def get_valid_token(path: Path) -> str:
    """Get a valid access token, refreshing if expired.

    Args:
        path: Path to the token JSON file.

    Returns:
        A valid access token string.

    Raises:
        FileNotFoundError: If no tokens are stored.
        RuntimeError: If refresh fails.
    """
    stored = load_tokens(path)
    if stored is None:
        raise FileNotFoundError(f"No tokens found at {path}. Run 'cortex init' first.")

    if stored.is_expired:
        # Refresh, then persist the rotated credentials before returning.
        fresh = await refresh_access_token(
            refresh_token=stored.refresh_token,
            client_id=stored.client_id,
            client_secret=stored.client_secret,
            token_endpoint=stored.token_endpoint,
        )
        stored.access_token = fresh["access_token"]
        stored.refresh_token = fresh.get("refresh_token", stored.refresh_token)
        stored.expires_at = time.time() + fresh.get("expires_in", 3600)
        save_tokens(stored, path)

    return stored.access_token
|