fh_matui-0.9-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,59 @@
1
+ """Utilities to help with integration of BeerCSS with FastHTML"""
2
+
3
+ # AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/00_foundations.ipynb.
4
+
5
+ # %% auto 0
6
+ __all__ = ['VEnum', 'stringify', 'normalize_tokens', 'dedupe_preserve_order']
7
+
8
+ # %% ../nbs/00_foundations.ipynb 2
9
+ from typing import Any, Iterable, Optional, Union
10
+ from enum import Enum
11
+ from fastcore.utils import *
12
+ from nbdev.showdoc import show_doc
13
+
14
+ # %% ../nbs/00_foundations.ipynb 4
15
+ #| code-fold: true
16
class VEnum(Enum):
    """Enum whose members render as their string value.

    Members stringify to ``self.value`` and support ``+`` on either
    side, producing a space-joined token string via `stringify`.
    """

    def __str__(self):
        return self.value

    def __add__(self, other):
        # member + other  ->  "member other"
        return stringify((self, other))

    def __radd__(self, other):
        # other + member  ->  "other member"
        return stringify((other, self))
21
+
22
+ # %% ../nbs/00_foundations.ipynb 8
23
+ #| code-fold: true
24
def stringify(o):
    """Flatten *o* into a single string suitable for FT components.

    Non-listy inputs are passed through ``str``; listy inputs are
    space-joined element-wise (an empty listy input yields "").
    """
    if not is_listy(o):
        return str(o)
    return ' '.join(str(item) for item in o) if o else ""
29
+
30
+ # %% ../nbs/00_foundations.ipynb 12
31
+ #| code-fold: true
32
+ def normalize_tokens(cls):
33
+ """Normalize class input to list of string tokens"""
34
+ if cls is None:
35
+ return []
36
+ if isinstance(cls, str):
37
+ return cls.split()
38
+ if isinstance(cls, Enum):
39
+ return [str(cls)]
40
+ if is_listy(cls):
41
+ tokens = []
42
+ for item in cls:
43
+ if isinstance(item, str):
44
+ tokens.extend(item.split())
45
+ elif isinstance(item, Enum):
46
+ tokens.append(str(item))
47
+ return tokens
48
+ return []
49
+
50
+
51
def dedupe_preserve_order(tokens):
    """Return *tokens* with duplicates dropped, keeping first occurrences.

    Tokens must be hashable (the original used a ``set`` for the same
    reason).
    """
    # dict keys are unique and preserve insertion order (Python 3.7+),
    # so this removes repeats while keeping the original ordering.
    return list(dict.fromkeys(tokens))