codebase-cortex 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37) hide show
  1. codebase_cortex/__init__.py +3 -0
  2. codebase_cortex/agents/__init__.py +0 -0
  3. codebase_cortex/agents/base.py +69 -0
  4. codebase_cortex/agents/code_analyzer.py +122 -0
  5. codebase_cortex/agents/doc_writer.py +356 -0
  6. codebase_cortex/agents/semantic_finder.py +64 -0
  7. codebase_cortex/agents/sprint_reporter.py +152 -0
  8. codebase_cortex/agents/task_creator.py +138 -0
  9. codebase_cortex/auth/__init__.py +0 -0
  10. codebase_cortex/auth/callback_server.py +80 -0
  11. codebase_cortex/auth/oauth.py +173 -0
  12. codebase_cortex/auth/token_store.py +90 -0
  13. codebase_cortex/cli.py +855 -0
  14. codebase_cortex/config.py +150 -0
  15. codebase_cortex/embeddings/__init__.py +0 -0
  16. codebase_cortex/embeddings/clustering.py +140 -0
  17. codebase_cortex/embeddings/indexer.py +208 -0
  18. codebase_cortex/embeddings/store.py +126 -0
  19. codebase_cortex/git/__init__.py +0 -0
  20. codebase_cortex/git/diff_parser.py +185 -0
  21. codebase_cortex/git/github_client.py +46 -0
  22. codebase_cortex/graph.py +111 -0
  23. codebase_cortex/mcp_client.py +94 -0
  24. codebase_cortex/notion/__init__.py +0 -0
  25. codebase_cortex/notion/bootstrap.py +298 -0
  26. codebase_cortex/notion/page_cache.py +107 -0
  27. codebase_cortex/state.py +77 -0
  28. codebase_cortex/utils/__init__.py +0 -0
  29. codebase_cortex/utils/json_parsing.py +59 -0
  30. codebase_cortex/utils/logging.py +62 -0
  31. codebase_cortex/utils/rate_limiter.py +56 -0
  32. codebase_cortex/utils/section_parser.py +139 -0
  33. codebase_cortex-0.1.0.dist-info/METADATA +209 -0
  34. codebase_cortex-0.1.0.dist-info/RECORD +37 -0
  35. codebase_cortex-0.1.0.dist-info/WHEEL +4 -0
  36. codebase_cortex-0.1.0.dist-info/entry_points.txt +3 -0
  37. codebase_cortex-0.1.0.dist-info/licenses/LICENSE +21 -0
@@ -0,0 +1,139 @@
1
+ """Markdown section parser for section-level page updates."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import re
6
+ from dataclasses import dataclass
7
+
8
+
9
@dataclass
class Section:
    """A section of a markdown document."""

    heading: str  # Full heading line e.g. "## API Endpoints" (empty for preamble)
    level: int  # Number of # symbols (0 for preamble)
    content: str  # Content between this heading and the next

    @property
    def full_text(self) -> str:
        """Reconstruct the section as it appears in the document."""
        if self.heading and self.content:
            return f"{self.heading}\n{self.content}"
        return self.heading or self.content


# ATX heading: 1-6 '#' characters followed by at least one whitespace char.
# Compiled once at module load instead of re-matching the pattern per line.
_HEADING_RE = re.compile(r"^(#{1,6})\s+")


def parse_sections(markdown: str) -> list[Section]:
    """Parse markdown into a flat list of sections split by headings.

    Each section contains the heading line and all content until the next
    heading. Content before the first heading becomes a preamble section
    with level 0.

    Heading-like lines inside fenced code blocks (``` or ~~~) are treated
    as plain content, not headings — e.g. a ``# comment`` line inside a
    fenced Python snippet must not split the section.
    """
    if not markdown or not markdown.strip():
        return []

    sections: list[Section] = []
    current_heading = ""
    current_level = 0
    buffer: list[str] = []
    in_fence = False  # inside a ``` / ~~~ fenced code block

    def flush() -> None:
        # Emit the accumulated section; skip the empty leading preamble.
        content = "\n".join(buffer).strip()
        if current_heading or content:
            sections.append(Section(
                heading=current_heading,
                level=current_level,
                content=content,
            ))

    for line in markdown.split("\n"):
        # Simple open/close toggle; fence markers stay part of the content.
        if line.lstrip().startswith(("```", "~~~")):
            in_fence = not in_fence
            buffer.append(line)
            continue
        match = None if in_fence else _HEADING_RE.match(line)
        if match:
            flush()  # Flush previous section
            current_heading = line
            current_level = len(match.group(1))
            buffer = []
        else:
            buffer.append(line)

    flush()  # Flush last section
    return sections
67
+
68
+
69
def normalize_heading(heading: str) -> str:
    """Normalize a heading for fuzzy comparison.

    Strips # symbols, extra whitespace, and lowercases.
    """
    # Dropping the leading '#' run and trimming surrounding whitespace is
    # equivalent to re.sub(r"^#+\s*", "", heading).strip().
    return heading.lstrip("#").strip().lower()


def merge_sections(
    existing_sections: list[Section],
    section_updates: list[dict],
) -> str:
    """Merge section-level updates into existing page content.

    For each update:
    - action "update": replaces the matching section's content (heading preserved)
    - action "create": appends as a new section at the end

    If an "update" heading doesn't match any existing section, it's appended
    as a new section (graceful fallback).

    Returns the full reconstructed page content.
    """
    # Split updates into in-place replacements (keyed by normalized
    # heading) and explicit additions, preserving their input order.
    pending: dict[str, dict] = {}
    additions: list[dict] = []
    for upd in section_updates:
        if upd.get("action", "update") == "create":
            additions.append(upd)
            continue
        key = normalize_heading(upd.get("heading", ""))
        if key:
            pending[key] = upd

    parts: list[str] = []
    consumed: set[str] = set()

    # Walk existing sections in order, swapping in replacement content
    # where a normalized heading matches; originals pass through untouched.
    for sec in existing_sections:
        key = normalize_heading(sec.heading) if sec.heading else ""
        upd = pending.get(key) if key else None
        if upd is None:
            parts.append(sec.full_text)
            continue
        consumed.add(key)
        body = upd.get("content", "").strip()
        # Keep the original heading format; only the content is replaced.
        parts.append(f"{sec.heading}\n{body}" if sec.heading else body)

    # Graceful fallback: "update" entries that matched nothing become
    # brand-new sections at the end.
    for key, upd in pending.items():
        if key in consumed:
            continue
        head = upd.get("heading", "")
        body = upd.get("content", "").strip()
        if head and body:
            parts.append(f"{head}\n{body}")

    # Finally append the explicitly created sections.
    for upd in additions:
        head = upd.get("heading", "")
        body = upd.get("content", "").strip()
        if head and body:
            parts.append(f"{head}\n{body}")
        elif body:
            parts.append(body)

    return "\n\n".join(p for p in parts if p.strip())
@@ -0,0 +1,209 @@
1
+ Metadata-Version: 2.4
2
+ Name: codebase-cortex
3
+ Version: 0.1.0
4
+ Summary: AI-powered documentation autopilot — commit code, docs update themselves. Five LangGraph agents analyze diffs, find related code via FAISS embeddings, and sync Notion pages through MCP.
5
+ Project-URL: Homepage, https://github.com/sarupurisailalith/codebase-cortex
6
+ Project-URL: Repository, https://github.com/sarupurisailalith/codebase-cortex
7
+ Project-URL: Documentation, https://github.com/sarupurisailalith/codebase-cortex/tree/main/docs
8
+ Project-URL: Issues, https://github.com/sarupurisailalith/codebase-cortex/issues
9
+ Author-email: Sailalith Sarupuri <sarupurisailalith@gmail.com>
10
+ License-Expression: MIT
11
+ License-File: LICENSE
12
+ Keywords: agents,automation,code-analysis,developer-tools,docs-as-code,documentation,embeddings,faiss,git,langgraph,mcp,model-context-protocol,multi-agent,notion,semantic-search
13
+ Classifier: Development Status :: 4 - Beta
14
+ Classifier: Environment :: Console
15
+ Classifier: Intended Audience :: Developers
16
+ Classifier: License :: OSI Approved :: MIT License
17
+ Classifier: Operating System :: OS Independent
18
+ Classifier: Programming Language :: Python :: 3
19
+ Classifier: Programming Language :: Python :: 3.11
20
+ Classifier: Programming Language :: Python :: 3.12
21
+ Classifier: Programming Language :: Python :: 3.13
22
+ Classifier: Programming Language :: Python :: 3.14
23
+ Classifier: Topic :: Software Development :: Documentation
24
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
25
+ Classifier: Topic :: Software Development :: Version Control :: Git
26
+ Classifier: Topic :: Text Processing :: Markup :: Markdown
27
+ Classifier: Typing :: Typed
28
+ Requires-Python: >=3.11
29
+ Requires-Dist: click>=8.0
30
+ Requires-Dist: faiss-cpu>=1.8
31
+ Requires-Dist: gitpython>=3.1
32
+ Requires-Dist: hdbscan>=0.8
33
+ Requires-Dist: httpx>=0.27
34
+ Requires-Dist: langchain-anthropic>=0.3
35
+ Requires-Dist: langchain-core>=0.3
36
+ Requires-Dist: langchain-google-genai>=2.1
37
+ Requires-Dist: langchain-mcp-adapters>=0.1
38
+ Requires-Dist: langchain-openai>=0.3
39
+ Requires-Dist: langgraph>=0.3
40
+ Requires-Dist: mcp>=1.0
41
+ Requires-Dist: pygithub>=2.0
42
+ Requires-Dist: python-dotenv>=1.0
43
+ Requires-Dist: rich>=13.0
44
+ Requires-Dist: sentence-transformers>=3.0
45
+ Description-Content-Type: text/markdown
46
+
47
+ # Codebase Cortex
48
+
49
+ **Automatically keep your engineering documentation in sync with code.**
50
+
51
+ Codebase Cortex is a multi-agent system that watches your codebase for changes and updates your Notion documentation automatically. It uses LangGraph to orchestrate five specialized AI agents that analyze code, find related docs, write updates, create tasks, and generate sprint reports — all through the [Notion MCP](https://developers.notion.com/docs/mcp) protocol.
52
+
53
+ ```mermaid
54
+ graph LR
55
+ A[Git Commit] --> B[CodeAnalyzer]
56
+ B --> C[SemanticFinder]
57
+ C --> D[DocWriter]
58
+ D --> E[TaskCreator]
59
+ E --> F[SprintReporter]
60
+ F --> G[Notion Workspace]
61
+ ```
62
+
63
+ ## Features
64
+
65
+ - **Automatic doc sync** — Commit code, docs update themselves via post-commit hook
66
+ - **Section-level updates** — Only changed sections are rewritten, preserving the rest
67
+ - **Semantic search** — FAISS embeddings find related code across your entire codebase
68
+ - **Natural language prompts** — `cortex prompt "Add more API examples"` to direct updates
69
+ - **Multi-page intelligence** — Agents understand relationships across all your doc pages
70
+ - **Sprint reports** — Weekly summaries generated from commit activity
71
+ - **Task tracking** — Automatically identifies undocumented areas and creates Notion tasks
72
+
73
+ ## Quick Start
74
+
75
+ ### Prerequisites
76
+
77
+ - Python 3.11+
78
+ - [uv](https://docs.astral.sh/uv/) package manager
79
+ - A Notion account (free plan works)
80
+ - An LLM API key (Google Gemini, Anthropic, or OpenRouter)
81
+
82
+ ### Install
83
+
84
+ ```bash
85
+ # Install from PyPI
86
+ pip install codebase-cortex
87
+
88
+ # Or with uv
89
+ uv tool install codebase-cortex
90
+ ```
91
+
92
+ Both `cortex` and `codebase-cortex` commands are available after installation. If `cortex` conflicts with another package on your system, use `codebase-cortex` instead.
93
+
94
+ <details>
95
+ <summary>Install from source</summary>
96
+
97
+ ```bash
98
+ git clone https://github.com/sarupurisailalith/codebase-cortex.git
99
+ cd codebase-cortex
100
+ uv sync
101
+ uv tool install .
102
+ ```
103
+ </details>
104
+
105
+ ### Initialize in your project
106
+
107
+ ```bash
108
+ cd /path/to/your-project
109
+
110
+ # Interactive setup — connects to Notion, configures LLM, creates starter pages
111
+ cortex init
112
+
113
+ # Run the pipeline
114
+ cortex run --once
115
+ ```
116
+
117
+ The `init` wizard will:
118
+ 1. Ask for your LLM provider and API key
119
+ 2. Open a browser for Notion OAuth authorization
120
+ 3. Create starter documentation pages in Notion
121
+ 4. Optionally install a post-commit git hook
122
+
123
+ ## CLI Commands
124
+
125
+ | Command | Description |
126
+ |---------|-------------|
127
+ | `cortex init` | Interactive setup wizard |
128
+ | `cortex run --once` | Run the full pipeline once |
129
+ | `cortex run --once --full` | Full codebase analysis (not just recent diff) |
130
+ | `cortex run --once --dry-run` | Analyze without writing to Notion |
131
+ | `cortex prompt "instruction"` | Natural language doc updates |
132
+ | `cortex prompt "..." -p "Page"` | Target specific page(s) |
133
+ | `cortex status` | Show connection and config status |
134
+ | `cortex analyze` | One-shot diff analysis (no Notion writes) |
135
+ | `cortex embed` | Rebuild the FAISS embedding index |
136
+ | `cortex scan` | Discover existing Notion pages |
137
+ | `cortex scan --link <id>` | Link a specific Notion page |
138
+
139
+ ## How It Works
140
+
141
+ Cortex creates a `.cortex/` directory (gitignored) in your project repo that stores configuration, OAuth tokens, and the FAISS vector index. When you run the pipeline, five agents work in sequence:
142
+
143
+ ```mermaid
144
+ graph TD
145
+ START([Start]) --> CA[CodeAnalyzer]
146
+ CA -->|Has analysis?| SF[SemanticFinder]
147
+ CA -->|No changes| END1([End])
148
+ SF --> DW[DocWriter]
149
+ DW --> TC[TaskCreator]
150
+ TC -->|Has updates?| SR[SprintReporter]
151
+ TC -->|Nothing to report| END2([End])
152
+ SR --> END3([End])
153
+
154
+ style CA fill:#4A90D9,color:#fff
155
+ style SF fill:#7B68EE,color:#fff
156
+ style DW fill:#50C878,color:#fff
157
+ style TC fill:#FFB347,color:#fff
158
+ style SR fill:#FF6B6B,color:#fff
159
+ ```
160
+
161
+ 1. **CodeAnalyzer** — Parses git diffs (or scans the full codebase) and produces a structured analysis of what changed
162
+ 2. **SemanticFinder** — Embeds the analysis and searches the FAISS index to find semantically related code chunks
163
+ 3. **DocWriter** — Fetches current Notion pages, generates section-level updates, and merges them deterministically
164
+ 4. **TaskCreator** — Identifies undocumented areas and creates task pages in Notion
165
+ 5. **SprintReporter** — Synthesizes all activity into a weekly sprint summary
166
+
167
+ ## Architecture
168
+
169
+ For detailed architecture documentation, see [`docs/architecture.md`](docs/architecture.md).
170
+
171
+ ## Per-Repo Configuration
172
+
173
+ ```
174
+ your-project/
175
+ ├── .cortex/ # Created by cortex init (gitignored)
176
+ │ ├── .env # LLM provider, API keys
177
+ │ ├── .gitignore # Ignores everything in .cortex/
178
+ │ ├── notion_tokens.json # OAuth tokens (auto-refreshed)
179
+ │ ├── page_cache.json # Tracked Notion pages
180
+ │ └── faiss_index/ # Vector embeddings
181
+ │ ├── index.faiss
182
+ │ └── chunks.json
183
+ ├── src/
184
+ └── ...
185
+ ```
186
+
187
+ ## Supported LLM Providers
188
+
189
+ | Provider | Models | Config Key |
190
+ |----------|--------|------------|
191
+ | Google Gemini | gemini-2.5-flash-lite, gemini-3-flash-preview, gemini-2.5-pro | `GOOGLE_API_KEY` |
192
+ | Anthropic | claude-sonnet-4, claude-haiku-4.5 | `ANTHROPIC_API_KEY` |
193
+ | OpenRouter | Any model via OpenRouter | `OPENROUTER_API_KEY` |
194
+
195
+ ## Documentation
196
+
197
+ | Document | Description |
198
+ |----------|-------------|
199
+ | [Architecture](docs/architecture.md) | System design, data flow, agent pipeline |
200
+ | [CLI Reference](docs/cli-reference.md) | All commands, options, and examples |
201
+ | [Agents](docs/agents.md) | How each agent works |
202
+ | [Configuration](docs/configuration.md) | Setup, LLM providers, environment variables |
203
+ | [Notion Integration](docs/notion-integration.md) | OAuth flow, MCP protocol, page management |
204
+ | [Embeddings & Search](docs/embeddings.md) | FAISS index, semantic search, HDBSCAN clustering |
205
+ | [Contributing](docs/contributing.md) | Development setup, testing, project structure |
206
+
207
+ ## License
208
+
209
+ MIT
@@ -0,0 +1,37 @@
1
+ codebase_cortex/__init__.py,sha256=hKMkN2o22KHG_Z-Y9AaJPaMkEZs-YF1opIXlUzxNpw4,112
2
+ codebase_cortex/cli.py,sha256=BU0HPfQ3KS6tVFr_WLGOq5xa3rOTVrHBZjnXUdO77B0,31640
3
+ codebase_cortex/config.py,sha256=zQIlCrXRTGf0rW26ngfweNKpvSIJPZ9GpmknYXcrct8,4489
4
+ codebase_cortex/graph.py,sha256=wsW0OkYscKbecO-TmG7RdQwNxAbUM92v_4uqiSDf5p4,3491
5
+ codebase_cortex/mcp_client.py,sha256=jNzGUBXb0OYoA11yv72ho14LU2LzCudZIQuJkpos1m8,3004
6
+ codebase_cortex/state.py,sha256=4BnASFMZU2oEAMp-k4qtmgP3SSpUUdG6irEYg4b54LI,1700
7
+ codebase_cortex/agents/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
8
+ codebase_cortex/agents/base.py,sha256=skj0FHxKvjFPzHjy7XUr3RJe7d2OqEPjOC5nh-WBKc8,2383
9
+ codebase_cortex/agents/code_analyzer.py,sha256=f_8kzftye4xOrvSq0B5s5oO-r1WgahV5kkcnf6ljULE,4537
10
+ codebase_cortex/agents/doc_writer.py,sha256=ohRf_R80pMocak3hkK1GSmz42-AwXYY-H4p5pos2Qhg,14895
11
+ codebase_cortex/agents/semantic_finder.py,sha256=uxoBu94evAqiUHEyEGQbZP3ICKMBdIeZZYEseDf9Tis,2248
12
+ codebase_cortex/agents/sprint_reporter.py,sha256=t3nyrJnjhZEDXrJwpK--7NbpNSs840V_gw2FL1p070A,5852
13
+ codebase_cortex/agents/task_creator.py,sha256=NGfC4Zg37GUx4sNsW6MZK7kUdTLFs-C3q2D9c7xBMNg,5336
14
+ codebase_cortex/auth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
15
+ codebase_cortex/auth/callback_server.py,sha256=E15ivNx1nWpt7RY9gsMaaiDKqlCg5HDofPFeugcVEq0,2694
16
+ codebase_cortex/auth/oauth.py,sha256=CPD94odyS0YiXwjaxbHdXKI9jI3vpw1G_1GoW59ra4A,5257
17
+ codebase_cortex/auth/token_store.py,sha256=a4X99UccYbkd_q3yQhRUjGJRL6KWp5ydO74e3pFaEDo,2524
18
+ codebase_cortex/embeddings/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
19
+ codebase_cortex/embeddings/clustering.py,sha256=oTAU71DCHEcs44cdUmjm4Ag6uruH8vgbXVoZp1kbHBQ,4496
20
+ codebase_cortex/embeddings/indexer.py,sha256=4zxLKMy1Y4vz4vxsrJ8lNDK8SCZCEbiq4YXtwAkij_s,7012
21
+ codebase_cortex/embeddings/store.py,sha256=ZHn-Uo2f14WkcRFBD4QIQi9vJLd1M81NMp6oQMxflOc,3953
22
+ codebase_cortex/git/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
23
+ codebase_cortex/git/diff_parser.py,sha256=kujnB1dcW0L-eL_KnzrpC4-I-Rl8pHvwxD8201q2Saw,5487
24
+ codebase_cortex/git/github_client.py,sha256=_5yDz4dgwtQTss6qBlW0_7Py_RZixRNeJVd_WcFrN3c,1279
25
+ codebase_cortex/notion/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
26
+ codebase_cortex/notion/bootstrap.py,sha256=2L0l90IEej272cfHgb4MrsYNc2PvDNfbqWLloCSs5H4,10616
27
+ codebase_cortex/notion/page_cache.py,sha256=JSgf51gWwnCUbNLwNRPq1sr1cCDhPDLBljjXFGqXd7g,3507
28
+ codebase_cortex/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
29
+ codebase_cortex/utils/json_parsing.py,sha256=zDruntjXKwGUrbHe5kYmaMj9ZaT3WLXlqlSmpjiKePQ,1678
30
+ codebase_cortex/utils/logging.py,sha256=rSWZFXGtXY7Nz00onrankTaYQdhXBXIO7pwoDGBQKqM,1569
31
+ codebase_cortex/utils/rate_limiter.py,sha256=e8ijgOJAiG2_EKprVrxG-6yPrcXcbKg0GDkCxiijt64,1665
32
+ codebase_cortex/utils/section_parser.py,sha256=BUrXmbJUPGRGSaZ9_YyG91Hvq9-C6Z_XIaQWrQAiTWM,4496
33
+ codebase_cortex-0.1.0.dist-info/METADATA,sha256=YG8MNvbPWMpay0zxIigaMjGmTq9R0XAsO5LV9Gn-moI,8236
34
+ codebase_cortex-0.1.0.dist-info/WHEEL,sha256=QccIxa26bgl1E6uMy58deGWi-0aeIkkangHcxk2kWfw,87
35
+ codebase_cortex-0.1.0.dist-info/entry_points.txt,sha256=KfBJoRID7C0JYztS5F6bbL7PGpXcdu1yiwKf1oJzzQk,93
36
+ codebase_cortex-0.1.0.dist-info/licenses/LICENSE,sha256=qAL3B2b4WSnYV-A1VouD_sPw3PSzwBtVIKSCSil0lHs,1076
37
+ codebase_cortex-0.1.0.dist-info/RECORD,,
@@ -0,0 +1,4 @@
1
+ Wheel-Version: 1.0
2
+ Generator: hatchling 1.29.0
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
@@ -0,0 +1,3 @@
1
+ [console_scripts]
2
+ codebase-cortex = codebase_cortex.cli:cli
3
+ cortex = codebase_cortex.cli:cli
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 Sarupuri Sai Lalith
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.