funcsnap 0.0.1.dev1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
funcsnap/__init__.py ADDED
@@ -0,0 +1,12 @@
1
+ """Extract and reconstruct Python function source and dependencies."""
2
+
3
+ from importlib.metadata import PackageNotFoundError, version
4
+
5
+ from funcsnap.save_source_function import get_source_functions, reconstruct
6
+
7
+ __all__ = ["__version__", "get_source_functions", "reconstruct"]
8
+
9
+ try:
10
+ __version__ = version("funcsnap")
11
+ except PackageNotFoundError:
12
+ __version__ = "0.0.0"
@@ -0,0 +1,507 @@
1
+ """Module for analyzing functions and extracting their dependencies."""
2
+
3
+ import ast
4
+ import inspect
5
+ import os
6
+ import re
7
+ import site
8
+ import sysconfig
9
+ import textwrap
10
+ from typing import Any
11
+
12
+
13
+ def _get_env_dirs() -> list[str]:
14
+ """Return absolute paths of Python environment directories."""
15
+ dirs: set[str] = set()
16
+ for path in sysconfig.get_paths().values():
17
+ if path:
18
+ dirs.add(os.path.abspath(path))
19
+ try:
20
+ for path in site.getsitepackages():
21
+ dirs.add(os.path.abspath(path))
22
+ except AttributeError:
23
+ pass
24
+ try:
25
+ dirs.add(os.path.abspath(site.getusersitepackages()))
26
+ except AttributeError:
27
+ pass
28
+ return list(dirs)
29
+
30
+
31
# Snapshot of interpreter/environment directories, computed once at import
# time; used to skip recursion into stdlib and site-packages code.
_ENV_DIRS: list[str] = _get_env_dirs()
32
+
33
+
34
+ def _is_under(path: str, directory: str) -> bool:
35
+ """Return True if path is inside directory."""
36
+ return path == directory or path.startswith(directory + os.sep)
37
+
38
+
39
+ class FunctionAnalyzer:
40
+ """
41
+ Analyzes a function and extracts all used functions,
42
+ variables, classes, and modules.
43
+ """
44
+
45
+ def __init__(
46
+ self,
47
+ exclude_dirs: list[str] | None = None,
48
+ include_dirs: list[str] | None = None,
49
+ ):
50
+ self.results: dict[str, dict[str, Any]] = {}
51
+ self.visited: set[str] = set()
52
+ self._exclude_dirs = [os.path.abspath(d) for d in (exclude_dirs or [])]
53
+ self._include_dirs = (
54
+ [os.path.abspath(d) for d in include_dirs]
55
+ if include_dirs is not None
56
+ else None
57
+ )
58
+
59
+ def _should_analyze(self, file_path: str) -> bool:
60
+ """Return True if we should recurse into this file."""
61
+ if not file_path or file_path == "unknown":
62
+ return False
63
+ abs_path = os.path.abspath(file_path)
64
+ for env_dir in _ENV_DIRS:
65
+ if _is_under(abs_path, env_dir):
66
+ return False
67
+ for excl_dir in self._exclude_dirs:
68
+ if _is_under(abs_path, excl_dir):
69
+ return False
70
+ if self._include_dirs is not None:
71
+ return any(_is_under(abs_path, d) for d in self._include_dirs)
72
+ return True
73
+
74
+ def analyze_function(self, func) -> dict[str, dict[str, Any]]:
75
+ """Analyze a function and return a dictionary of all used symbols."""
76
+ self.results = {}
77
+ self.visited = set()
78
+ self._analyze_func(func)
79
+ return self.results
80
+
81
+ def _analyze_func(self, func):
82
+ """Recursively analyze a function and its dependencies."""
83
+ if not (inspect.isfunction(func) or inspect.ismethod(func)):
84
+ return
85
+ func_id = self._get_func_id(func)
86
+ if func_id in self.visited:
87
+ return
88
+ self.visited.add(func_id)
89
+
90
+ file_path = self._get_file(func)
91
+ source = self._get_source(func)
92
+ used_names = self._get_used_names(source)
93
+ globals_dict = (
94
+ func.__globals__ if inspect.isfunction(func) else func.__func__.__globals__
95
+ )
96
+
97
+ self.results[func_id] = {
98
+ "value": source,
99
+ "type": "function",
100
+ "file": file_path,
101
+ "external_vars": [],
102
+ "imports": self._extract_needed_imports(file_path, used_names),
103
+ }
104
+ self.results[func_id]["external_vars"] = self._collect_deps(
105
+ used_names, globals_dict, func.__module__, source
106
+ )
107
+
108
+ def _collect_deps(
109
+ self,
110
+ used_names: set[str],
111
+ globals_dict: dict,
112
+ module_name: str,
113
+ source: str = "",
114
+ ) -> list[str]:
115
+ """Process used names, populate results, and return dep ID list."""
116
+ dep_ids: list[str] = []
117
+ seen: set[str] = set()
118
+
119
+ for name in used_names:
120
+ if name not in globals_dict:
121
+ continue
122
+ value = globals_dict[name]
123
+
124
+ if inspect.isfunction(value):
125
+ dep_id = self._get_func_id(value)
126
+ if dep_id not in seen:
127
+ seen.add(dep_id)
128
+ dep_ids.append(dep_id)
129
+ if self._should_analyze(self._get_file(value)):
130
+ self._analyze_func(value)
131
+
132
+ elif inspect.isclass(value):
133
+ dep_id = self._get_class_id(value)
134
+ if dep_id not in seen:
135
+ seen.add(dep_id)
136
+ dep_ids.append(dep_id)
137
+ if dep_id not in self.results and self._should_analyze(
138
+ self._get_file(value)
139
+ ):
140
+ self._analyze_class(value)
141
+
142
+ elif inspect.ismodule(value):
143
+ mod_id = value.__name__
144
+ if mod_id not in seen:
145
+ seen.add(mod_id)
146
+ dep_ids.append(mod_id)
147
+ # Resolve `name.attr` accesses (e.g. `prim.clamp`) and recurse
148
+ # into whatever they resolve to in the module.
149
+ if source:
150
+ for attr in _attr_accesses(source, name):
151
+ attr_val = getattr(value, attr, None)
152
+ if attr_val is None:
153
+ continue
154
+ if inspect.isfunction(attr_val):
155
+ sub_id = self._get_func_id(attr_val)
156
+ if sub_id not in seen:
157
+ seen.add(sub_id)
158
+ dep_ids.append(sub_id)
159
+ if self._should_analyze(self._get_file(attr_val)):
160
+ self._analyze_func(attr_val)
161
+ elif inspect.isclass(attr_val):
162
+ sub_id = self._get_class_id(attr_val)
163
+ if sub_id not in seen:
164
+ seen.add(sub_id)
165
+ dep_ids.append(sub_id)
166
+ if sub_id not in self.results and self._should_analyze(
167
+ self._get_file(attr_val)
168
+ ):
169
+ self._analyze_class(attr_val)
170
+
171
+ else:
172
+ var_id = f"{module_name}.{name}"
173
+ if var_id not in self.results:
174
+ self.results[var_id] = {
175
+ "type": "variable",
176
+ "value": repr(value),
177
+ }
178
+ if var_id not in seen:
179
+ seen.add(var_id)
180
+ dep_ids.append(var_id)
181
+
182
+ return dep_ids
183
+
184
+ def _analyze_class(self, cls):
185
+ """Add a class to results and collect method dependencies."""
186
+ cls_id = self._get_class_id(cls)
187
+ cls_file = self._get_file(cls)
188
+ cls_source = self._get_source(cls)
189
+ cls_used = self._get_used_names(cls_source)
190
+
191
+ # Set entry before recursing to prevent re-entry
192
+ self.results[cls_id] = {
193
+ "value": cls_source,
194
+ "type": "class",
195
+ "file": cls_file,
196
+ "external_vars": [],
197
+ "imports": self._extract_needed_imports(cls_file, cls_used),
198
+ }
199
+
200
+ if not self._should_analyze(cls_file):
201
+ return
202
+
203
+ # Collect deps from all methods without adding methods as separate entries
204
+ all_dep_ids: list[str] = []
205
+ seen_deps: set[str] = set()
206
+ for _, method in inspect.getmembers(cls, predicate=inspect.isfunction):
207
+ method_source = self._get_source(method)
208
+ method_used = self._get_used_names(method_source)
209
+ for dep_id in self._collect_deps(
210
+ method_used, method.__globals__, method.__module__, method_source
211
+ ):
212
+ if dep_id not in seen_deps:
213
+ seen_deps.add(dep_id)
214
+ all_dep_ids.append(dep_id)
215
+
216
+ self.results[cls_id]["external_vars"] = all_dep_ids
217
+
218
+ def _extract_needed_imports(
219
+ self, file_path: str, used_names: set[str]
220
+ ) -> list[str]:
221
+ """Extract top-level imports from file whose bound names appear in used_names"""
222
+ if not file_path or file_path == "unknown":
223
+ return []
224
+ try:
225
+ source = open(file_path).read()
226
+ tree = ast.parse(source)
227
+ except Exception:
228
+ return []
229
+
230
+ imports: list[str] = []
231
+ for node in ast.iter_child_nodes(tree):
232
+ if isinstance(node, ast.Import):
233
+ bound = [alias.asname or alias.name for alias in node.names]
234
+ if any(n in used_names for n in bound):
235
+ text = ast.get_source_segment(source, node)
236
+ if text:
237
+ imports.append(text.strip())
238
+ elif isinstance(node, ast.ImportFrom):
239
+ bound = [alias.asname or alias.name for alias in node.names]
240
+ if any(n in used_names for n in bound):
241
+ text = ast.get_source_segment(source, node)
242
+ if text:
243
+ imports.append(text.strip())
244
+ return imports
245
+
246
+ def _get_func_id(self, func) -> str:
247
+ return f"{func.__module__}.{func.__name__}"
248
+
249
+ def _get_class_id(self, cls) -> str:
250
+ return f"{cls.__module__}.{cls.__name__}"
251
+
252
+ def _get_source(self, obj) -> str:
253
+ try:
254
+ return inspect.getsource(obj)
255
+ except Exception:
256
+ return ""
257
+
258
+ def _get_file(self, obj) -> str:
259
+ try:
260
+ return inspect.getfile(obj)
261
+ except Exception:
262
+ return "unknown"
263
+
264
+ def _get_used_names(self, source: str) -> set[str]:
265
+ """Parse AST and collect all referenced names in the source."""
266
+ used: set[str] = set()
267
+ try:
268
+ tree = ast.parse(textwrap.dedent(source))
269
+ for node in ast.walk(tree):
270
+ if isinstance(node, ast.Name) and isinstance(node.ctx, ast.Load):
271
+ used.add(node.id)
272
+ elif isinstance(node, ast.Attribute) and isinstance(node.ctx, ast.Load):
273
+ base = node
274
+ while isinstance(base, ast.Attribute):
275
+ base = base.value
276
+ if isinstance(base, ast.Name):
277
+ used.add(base.id)
278
+ except Exception:
279
+ pass
280
+ return used
281
+
282
+
283
def get_source_functions(
    func,
    exclude_dirs: list[str] | None = None,
    include_dirs: list[str] | None = None,
) -> dict[str, dict[str, Any]]:
    """Analyze a function and return a dictionary of all used symbols.

    Convenience wrapper around FunctionAnalyzer with a one-shot call.
    """
    return FunctionAnalyzer(
        exclude_dirs=exclude_dirs, include_dirs=include_dirs
    ).analyze_function(func)
291
+
292
+
293
+ def _import_bound_names(import_line: str) -> set[str]:
294
+ """Return the set of names bound by an import statement string."""
295
+ try:
296
+ tree = ast.parse(import_line)
297
+ except SyntaxError:
298
+ return set()
299
+ names: set[str] = set()
300
+ for node in ast.walk(tree):
301
+ if isinstance(node, (ast.Import, ast.ImportFrom)):
302
+ for alias in node.names:
303
+ names.add(alias.asname or alias.name.split(".")[0])
304
+ return names
305
+
306
+
307
+ def _is_relative_import(import_line: str) -> bool:
308
+ """Return True if the import line is a relative (intra-package) import."""
309
+ try:
310
+ tree = ast.parse(import_line)
311
+ for node in ast.walk(tree):
312
+ if isinstance(node, ast.ImportFrom) and node.level > 0:
313
+ return True
314
+ except SyntaxError:
315
+ pass
316
+ return False
317
+
318
+
319
+ def _attr_accesses(source: str, name: str) -> set[str]:
320
+ """
321
+ Return attribute names accessed on `name` in source
322
+ (e.g. prim.clamp → {'clamp'}).
323
+
324
+ """
325
+ attrs: set[str] = set()
326
+ try:
327
+ tree = ast.parse(textwrap.dedent(source))
328
+ for node in ast.walk(tree):
329
+ if (
330
+ isinstance(node, ast.Attribute)
331
+ and isinstance(node.value, ast.Name)
332
+ and node.value.id == name
333
+ ):
334
+ attrs.add(node.attr)
335
+ except Exception:
336
+ pass
337
+ return attrs
338
+
339
+
340
+ def _module_slug(full_key: str) -> str:
341
+ """'mylib.core.primitives.scale' → 'mylib_core_primitives'."""
342
+ return full_key.rsplit(".", 1)[0].replace(".", "_")
343
+
344
+
345
+ def _prefixed_name(full_key: str) -> str:
346
+ """'mylib.core.primitives.scale' → '_mylib_core_primitives__scale'."""
347
+ return f"_{_module_slug(full_key)}__{full_key.rsplit('.', 1)[-1]}"
348
+
349
+
350
def reconstruct(results: dict[str, dict[str, Any]]) -> str:
    """Produce a single executable Python string from get_source_functions() output.

    Emits, in order: deduplicated import lines, repr'd module-level
    variables, topologically sorted function/class definitions (with
    colliding short names renamed to module-prefixed forms),
    SimpleNamespace shims standing in for relatively-imported modules,
    and finally aliases that expose colliding definitions under both a
    canonical short name and slugged long names.
    """
    # 1. Separate variables from functions/classes
    variables = {k: v for k, v in results.items() if v.get("type") == "variable"}
    definitions = {
        k: v for k, v in results.items() if v.get("type") in ("function", "class")
    }

    # 2. Detect name collisions: multiple result keys sharing the same short name
    name_to_keys: dict[str, list[str]] = {}
    for key in definitions:
        name_to_keys.setdefault(key.split(".")[-1], []).append(key)
    colliding_names: set[str] = {n for n, ks in name_to_keys.items() if len(ks) > 1}

    # Map every key to the name it will be emitted under
    key_to_emitted: dict[str, str] = {
        key: (
            _prefixed_name(key)
            if key.split(".")[-1] in colliding_names
            else key.split(".")[-1]
        )
        for key in definitions
    }

    # The set of names that will be visible at module level after emit
    # (prefixed names plus the short-name collision aliases added in step 6).
    defined_names: set[str] = set(key_to_emitted.values()) | set(colliding_names)

    # All definition source texts — used to scan for module attribute accesses
    def_sources: list[str] = [v["value"] for v in definitions.values()]

    # 3. Classify import lines
    all_imports: list[str] = []
    module_alias_shims: dict[str, set[str]] = {}  # alias → attr names to expose
    seen_imports: set[str] = set()

    for entry in results.values():
        for imp in entry.get("imports", []):
            if imp in seen_imports:
                continue
            seen_imports.add(imp)
            bound = _import_bound_names(imp)
            # Drop if every bound name will be defined in the output
            if bound and bound.issubset(defined_names):
                continue
            # Relative imports can't be resolved in an exec() context
            if _is_relative_import(imp):
                # Instead, plan a SimpleNamespace shim for each alias whose
                # accessed attributes all resolve to emitted definitions.
                for alias in bound:
                    if alias not in defined_names and alias not in module_alias_shims:
                        attrs_used: set[str] = set()
                        for src in def_sources:
                            attrs_used |= _attr_accesses(src, alias)
                        known = attrs_used & defined_names
                        if known:
                            module_alias_shims[alias] = known
                continue  # never emit relative imports verbatim
            all_imports.append(imp)

    # 4. Topological sort: dependencies before dependents (Kahn's algorithm)
    def_keys = set(definitions.keys())
    in_degree = {k: 0 for k in def_keys}
    successors: dict[str, list[str]] = {k: [] for k in def_keys}

    for key, entry in definitions.items():
        for dep in entry.get("external_vars", []):
            if dep in def_keys:
                in_degree[key] += 1
                successors[dep].append(key)

    # Keep the ready queue sorted at every step so output order is
    # deterministic regardless of dict iteration order.
    queue = sorted(k for k in def_keys if in_degree[k] == 0)
    sorted_defs: list[str] = []
    while queue:
        node = queue.pop(0)
        sorted_defs.append(node)
        for succ in sorted(successors[node]):
            in_degree[succ] -= 1
            if in_degree[succ] == 0:
                queue.append(succ)
        queue.sort()

    # Append any remaining (cycle fallback)
    sorted_defs.extend(sorted(k for k in def_keys if k not in sorted_defs))

    # 5. Build output string
    parts: list[str] = []

    if all_imports or module_alias_shims:
        parts.append("# --- Imports ---")
        if module_alias_shims:
            # `types` is needed for the SimpleNamespace shims in the
            # "Module shims" section below.
            parts.append("import types")
        parts.extend(all_imports)
        parts.append("")

    if variables:
        parts.append("# --- Variables ---")
        for key, entry in variables.items():
            var_name = key.split(".")[-1]
            parts.append(f"{var_name} = {entry['value']}")
        parts.append("")

    if sorted_defs:
        parts.append("# --- Definitions ---")
        for key in sorted_defs:
            source = textwrap.dedent(definitions[key]["value"]).strip()
            emitted = key_to_emitted[key]
            orig = key.split(".")[-1]
            if emitted != orig:
                # Rename the first def/class line to the prefixed name
                source = re.sub(
                    rf"^((?:async\s+)?def|class)\s+{re.escape(orig)}\b",
                    rf"\1 {emitted}",
                    source,
                    count=1,
                )
            parts.append(source)
        parts.append("")

    # SimpleNamespace shims for module aliases
    # (e.g. `prim = types.SimpleNamespace(clamp=clamp)`)
    # Emitted after definitions because they reference already-defined names.
    if module_alias_shims:
        parts.append("# --- Module shims ---")
        for alias in sorted(module_alias_shims):
            attrs = sorted(module_alias_shims[alias])
            # Attrs may themselves have been prefixed due to collision;
            # use key_to_emitted by looking up
            # the short attr name in the emitted names map
            resolved_attrs = []
            for a in attrs:
                # Find what name 'a' is actually emitted as
                emitted_a = next(
                    (key_to_emitted[k] for k in definitions if k.split(".")[-1] == a),
                    a,
                )
                resolved_attrs.append(f"{a}={emitted_a}")
            attr_str = ", ".join(resolved_attrs)
            parts.append(f"{alias} = types.SimpleNamespace({attr_str})")
        parts.append("")

    # 6. Collision aliases — emit after all defs and shims
    if colliding_names:
        parts.append("# --- Collision aliases ---")
        for short_name in sorted(colliding_names):
            # Topological order decides the canonical version (first emitted).
            ordered_keys = [k for k in sorted_defs if k.split(".")[-1] == short_name]
            if not ordered_keys:
                continue
            canonical = ordered_keys[0]
            canonical_mod = canonical.rsplit(".", 1)[0]
            parts.append(
                f"# '{short_name}': {len(ordered_keys)} versions — "
                f"canonical from {canonical_mod}"
            )
            parts.append(f"{short_name} = {key_to_emitted[canonical]}")
            for k in ordered_keys[1:]:
                slug = _module_slug(k)
                parts.append(f"{short_name}__{slug} = {key_to_emitted[k]}")
            parts.append("")

    return "\n".join(parts)
@@ -0,0 +1,59 @@
1
+ Metadata-Version: 2.4
2
+ Name: funcsnap
3
+ Version: 0.0.1.dev1
4
+ Summary: Analyze Python functions and extract dependency source for reconstruction
5
+ Project-URL: Repository, https://github.com/yourusername/funcsnap
6
+ Author: funcsnap contributors
7
+ License-Expression: MIT
8
+ License-File: LICENSE
9
+ Keywords: ast,introspection,serialization,source
10
+ Classifier: Development Status :: 4 - Beta
11
+ Classifier: Intended Audience :: Developers
12
+ Classifier: License :: OSI Approved :: MIT License
13
+ Classifier: Programming Language :: Python :: 3
14
+ Classifier: Programming Language :: Python :: 3.10
15
+ Classifier: Programming Language :: Python :: 3.11
16
+ Classifier: Programming Language :: Python :: 3.12
17
+ Classifier: Programming Language :: Python :: 3.13
18
+ Classifier: Typing :: Typed
19
+ Requires-Python: >=3.10
20
+ Provides-Extra: dev
21
+ Requires-Dist: build>=1.2; extra == 'dev'
22
+ Requires-Dist: numpy>=1.26; extra == 'dev'
23
+ Requires-Dist: pytest>=8; extra == 'dev'
24
+ Requires-Dist: ruff>=0.8; extra == 'dev'
25
+ Requires-Dist: twine>=5; extra == 'dev'
26
+ Description-Content-Type: text/markdown
27
+
28
+ # funcsnap
29
+
30
+ Analyze Python functions, walk their dependencies, and **reconstruct** a self-contained source string suitable for `exec()`.
31
+
32
+ ## Install
33
+
34
+ ```bash
35
+ pip install funcsnap
36
+ ```
37
+
38
+ ## Development
39
+
40
+ Clone the repo, create a virtual environment, then:
41
+
42
+ ```bash
43
+ pip install -e ".[dev]"
44
+ ruff check .
45
+ pytest
46
+ ```
47
+
48
+ Version strings are derived from Git tags ([hatch-vcs](https://github.com/ofek/hatch-vcs)). Without a repository or tags, builds use the fallback version from `pyproject.toml`.
49
+
50
+ ### PyPI releases (maintainers)
51
+
52
+ 1. Configure **trusted publishing** on [PyPI](https://pypi.org/manage/account/publishing/) for this repository and workflow `.github/workflows/publish.yml` (optional GitHub Environment `pypi`).
53
+ 2. Merge your changes to `main`, then tag and push: `git tag v0.1.0 && git push origin v0.1.0` (use the next semantic version). The publish workflow runs on tags matching `v*`.
54
+
55
+ Update the `Repository` URL in `pyproject.toml` under `[project.urls]` to match your GitHub repo.
56
+
57
+ ## License
58
+
59
+ MIT — see [LICENSE](LICENSE).
@@ -0,0 +1,6 @@
1
+ funcsnap/__init__.py,sha256=ZiA7jSDJBK7H96rON6OOwffu6ncjtdY94NGmtPS6RHQ,375
2
+ funcsnap/save_source_function.py,sha256=-AYNwXxdnzwc_rO-Wr4JA7J5roYuZg5uDQWAoql9N4A,18702
3
+ funcsnap-0.0.1.dev1.dist-info/METADATA,sha256=FhwhmcpogNy-Oa3M6q5YKo93TzIEWyBQ1nA4YfRBiBY,2077
4
+ funcsnap-0.0.1.dev1.dist-info/WHEEL,sha256=QccIxa26bgl1E6uMy58deGWi-0aeIkkangHcxk2kWfw,87
5
+ funcsnap-0.0.1.dev1.dist-info/licenses/LICENSE,sha256=-kRiz8ytzAGxYnzf5Uofpesw6lmHfjhtChFHmYTfz10,1078
6
+ funcsnap-0.0.1.dev1.dist-info/RECORD,,
@@ -0,0 +1,4 @@
1
+ Wheel-Version: 1.0
2
+ Generator: hatchling 1.29.0
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 funcsnap contributors
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.