fred-oss 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fred_oss-0.1.0/MANIFEST.in +2 -0
- fred_oss-0.1.0/NOTICE.txt +13 -0
- fred_oss-0.1.0/PKG-INFO +42 -0
- fred_oss-0.1.0/README.md +21 -0
- fred_oss-0.1.0/requirements.txt +3 -0
- fred_oss-0.1.0/setup.cfg +4 -0
- fred_oss-0.1.0/setup.py +51 -0
- fred_oss-0.1.0/src/main/fred/cli/__init__.py +0 -0
- fred_oss-0.1.0/src/main/fred/cli/__main__.py +5 -0
- fred_oss-0.1.0/src/main/fred/cli/interface.py +67 -0
- fred_oss-0.1.0/src/main/fred/cli/main.py +26 -0
- fred_oss-0.1.0/src/main/fred/integrations/databricks/__init__.py +11 -0
- fred_oss-0.1.0/src/main/fred/integrations/databricks/cli_ext.py +25 -0
- fred_oss-0.1.0/src/main/fred/integrations/databricks/runtime.py +59 -0
- fred_oss-0.1.0/src/main/fred/integrations/databricks/runtimes/__init__.py +0 -0
- fred_oss-0.1.0/src/main/fred/integrations/databricks/runtimes/scanner.py +16 -0
- fred_oss-0.1.0/src/main/fred/integrations/databricks/runtimes/sync.py +55 -0
- fred_oss-0.1.0/src/main/fred/integrations/databricks/wrappers/__init__.py +0 -0
- fred_oss-0.1.0/src/main/fred/integrations/databricks/wrappers/dbutils.py +145 -0
- fred_oss-0.1.0/src/main/fred/integrations/runpod/__init__.py +11 -0
- fred_oss-0.1.0/src/main/fred/integrations/runpod/cli_ext.py +48 -0
- fred_oss-0.1.0/src/main/fred/integrations/runpod/helper.py +48 -0
- fred_oss-0.1.0/src/main/fred/maturity.py +73 -0
- fred_oss-0.1.0/src/main/fred/settings.py +142 -0
- fred_oss-0.1.0/src/main/fred/utils/__init__.py +0 -0
- fred_oss-0.1.0/src/main/fred/utils/dateops.py +9 -0
- fred_oss-0.1.0/src/main/fred/utils/runtime.py +87 -0
- fred_oss-0.1.0/src/main/fred/version +1 -0
- fred_oss-0.1.0/src/main/fred/version.py +77 -0
- fred_oss-0.1.0/src/main/fred_oss.egg-info/PKG-INFO +42 -0
- fred_oss-0.1.0/src/main/fred_oss.egg-info/SOURCES.txt +33 -0
- fred_oss-0.1.0/src/main/fred_oss.egg-info/dependency_links.txt +1 -0
- fred_oss-0.1.0/src/main/fred_oss.egg-info/entry_points.txt +2 -0
- fred_oss-0.1.0/src/main/fred_oss.egg-info/requires.txt +1 -0
- fred_oss-0.1.0/src/main/fred_oss.egg-info/top_level.txt +1 -0
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
Copyright (c) 2025 FAHERA.MX (CORE FAHERA ENTERPRISE HOLDINGS S DE RL DE CV)
|
|
2
|
+
|
|
3
|
+
Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
you may not use this file except in compliance with the License.
|
|
5
|
+
You may obtain a copy of the License at
|
|
6
|
+
|
|
7
|
+
http://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
|
|
9
|
+
Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
See the License for the specific language governing permissions and
|
|
13
|
+
limitations under the License.
|
fred_oss-0.1.0/PKG-INFO
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: fred-oss
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: FREDOSS
|
|
5
|
+
Home-page: https://fred.fahera.mx
|
|
6
|
+
Author: Fahera Research, Education, and Development
|
|
7
|
+
Author-email: fred@fahera.mx
|
|
8
|
+
Requires-Python: >=3.12
|
|
9
|
+
Description-Content-Type: text/markdown
|
|
10
|
+
License-File: NOTICE.txt
|
|
11
|
+
Requires-Dist: fire==0.7.1
|
|
12
|
+
Dynamic: author
|
|
13
|
+
Dynamic: author-email
|
|
14
|
+
Dynamic: description
|
|
15
|
+
Dynamic: description-content-type
|
|
16
|
+
Dynamic: home-page
|
|
17
|
+
Dynamic: license-file
|
|
18
|
+
Dynamic: requires-dist
|
|
19
|
+
Dynamic: requires-python
|
|
20
|
+
Dynamic: summary
|
|
21
|
+
|
|
22
|
+
# FREDOSS
|
|
23
|
+
|
|
24
|
+
This is the open-source baseline python package called `fred` package by `fred.fahera.mx` (Fahera's Research, Education, and Development Team).
|
|
25
|
+
|
|
26
|
+
## Installation
|
|
27
|
+
|
|
28
|
+
```
|
|
29
|
+
$ pip install fred-oss
|
|
30
|
+
```
|
|
31
|
+
|
|
32
|
+
By default, the `fred-oss` package will only install the `default` dependencies. You can control which
|
|
33
|
+
dependency set to install by selecting 'dependency tags', using the following pattern:
|
|
34
|
+
|
|
35
|
+
```
|
|
36
|
+
$ pip install 'fred-oss[<tag-1>,<tag-2>,...]'
|
|
37
|
+
```
|
|
38
|
+
|
|
39
|
+
Where `<tag-i>` can be:
|
|
40
|
+
* `default`
|
|
41
|
+
* `all`
|
|
42
|
+
* ...
|
fred_oss-0.1.0/README.md
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
# FREDOSS
|
|
2
|
+
|
|
3
|
+
This is the open-source baseline python package called `fred` package by `fred.fahera.mx` (Fahera's Research, Education, and Development Team).
|
|
4
|
+
|
|
5
|
+
## Installation
|
|
6
|
+
|
|
7
|
+
```
|
|
8
|
+
$ pip install fred-oss
|
|
9
|
+
```
|
|
10
|
+
|
|
11
|
+
By default, the `fred-oss` package will only install the `default` dependencies. You can control which
|
|
12
|
+
dependency set to install by selecting 'dependency tags', using the following pattern:
|
|
13
|
+
|
|
14
|
+
```
|
|
15
|
+
$ pip install 'fred-oss[<tag-1>,<tag-2>,...]'
|
|
16
|
+
```
|
|
17
|
+
|
|
18
|
+
Where `<tag-i>` can be:
|
|
19
|
+
* `default`
|
|
20
|
+
* `all`
|
|
21
|
+
* ...
|
fred_oss-0.1.0/setup.cfg
ADDED
fred_oss-0.1.0/setup.py
ADDED
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
# setup.py -- packaging script for the `fred-oss` distribution.
import os
import json  # NOTE(review): json/Dict/List appear unused here -- confirm before removing.
from typing import Dict, List
from setuptools import setup, find_namespace_packages


# Root of the importable source tree; overridable for alternate layouts.
CODEBASE_PATH = os.environ.get(
    "CODEBASE_PATH",
    default=os.path.join("src", "main"),
)

# Runtime dependencies: every non-empty, non-comment line of requirements.txt.
with open("requirements.txt", "r") as file:
    requirements = [line for line in file.read().splitlines() if line and not line.startswith("#")]

# The version lives in a plain-text file so runtime code can read it too.
version_filepath = os.path.join(CODEBASE_PATH, "fred", "version")
with open(version_filepath, "r") as file:
    version = file.read().strip()

# README doubles as the long description on PyPI.
with open("README.md") as file:
    readme = file.read()

setup(
    name="fred-oss",
    version=version,
    description="FREDOSS",
    long_description=readme,
    long_description_content_type='text/markdown',
    url="https://fred.fahera.mx",
    author="Fahera Research, Education, and Development",
    author_email="fred@fahera.mx",
    packages=find_namespace_packages(where=CODEBASE_PATH),
    package_dir={
        "": CODEBASE_PATH
    },
    # NOTE(review): package_data patterns are resolved relative to each
    # package directory, so this repo-relative path likely never matches;
    # the version file appears to be shipped via MANIFEST.in together with
    # include_package_data=True instead. Confirm before relying on this entry.
    package_data={
        "": [
            version_filepath,
        ]
    },
    entry_points={
        "console_scripts": [
            "fred=fred.cli.main:CLI.cli_exec",
        ]
    },
    install_requires=requirements,
    include_package_data=True,
    # Python version should be aligned to the latest databricks LTS runtime at the moment.
    # For more info: https://docs.databricks.com/aws/en/release-notes/runtime/
    # Reference: Databricks Runtime 16.4 LTS Using Python 3.12.3
    python_requires=">=3.12",
)
|
|
File without changes
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
import time
|
|
2
|
+
import datetime as dt
|
|
3
|
+
from typing import Optional
|
|
4
|
+
from dataclasses import dataclass, field
|
|
5
|
+
|
|
6
|
+
from fred.settings import (
|
|
7
|
+
get_environ_variable,
|
|
8
|
+
logger_manager,
|
|
9
|
+
)
|
|
10
|
+
|
|
11
|
+
logger = logger_manager.get_logger(name=__name__)
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
@dataclass(slots=True, frozen=True)
|
|
15
|
+
class AbstractCLI:
|
|
16
|
+
start_counter: float = field(default_factory=time.perf_counter)
|
|
17
|
+
start_ts: str = field(default_factory=dt.datetime.utcnow().isoformat)
|
|
18
|
+
|
|
19
|
+
def now(self, local: bool = False) -> str:
|
|
20
|
+
return (dt.datetime.utcnow() if not local else dt.datetime.now()).isoformat()
|
|
21
|
+
|
|
22
|
+
def environ(self, name: str) -> Optional[str]:
|
|
23
|
+
return get_environ_variable(
|
|
24
|
+
name=name,
|
|
25
|
+
enforce=False,
|
|
26
|
+
)
|
|
27
|
+
|
|
28
|
+
def runtime(
|
|
29
|
+
self,
|
|
30
|
+
include_modules: bool = False,
|
|
31
|
+
) -> dict:
|
|
32
|
+
from fred.utils.runtime import RuntimeInfo
|
|
33
|
+
|
|
34
|
+
return RuntimeInfo.auto().to_dict(exclude_modules=not include_modules)
|
|
35
|
+
|
|
36
|
+
def on_start(self):
|
|
37
|
+
logger.debug("CLI Method not implemented: on_start")
|
|
38
|
+
|
|
39
|
+
def on_finalize(self):
|
|
40
|
+
logger.debug("CLI method not implemented: on_finalize")
|
|
41
|
+
|
|
42
|
+
def __enter__(self) -> 'AbstractCLI':
|
|
43
|
+
self.on_start()
|
|
44
|
+
return self
|
|
45
|
+
|
|
46
|
+
def __exit__(self, *args):
|
|
47
|
+
self.on_finalize()
|
|
48
|
+
end_counter = time.perf_counter()
|
|
49
|
+
# TODO: Add warning logging if an error occurred
|
|
50
|
+
logger.debug(f"Command Timestamp Start: {self.start_ts}")
|
|
51
|
+
logger.debug(f"Command Timestamp Finalize: {self.now(local=False)}")
|
|
52
|
+
logger.info(f"Command Duration: {end_counter - self.start_counter}")
|
|
53
|
+
|
|
54
|
+
@classmethod
|
|
55
|
+
def default_config(cls, *args, **kwargs):
|
|
56
|
+
return cls(*args, **kwargs)
|
|
57
|
+
|
|
58
|
+
@classmethod
|
|
59
|
+
def cli_exec(cls, *args, **kwargs):
|
|
60
|
+
import fire
|
|
61
|
+
|
|
62
|
+
with cls.default_config(*args, **kwargs) as cli:
|
|
63
|
+
fire.Fire(cli)
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
class IntegrationExtCLI:
    """Marker base class for integration-specific CLI extension groups.

    Integration command groups (e.g. Databricks, RunPod) inherit from this
    so they can be treated uniformly by the CLI machinery. No shared
    behavior yet.
    """
    pass
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
from fred.version import version
|
|
2
|
+
from fred.settings import logger_manager
|
|
3
|
+
from fred.cli.interface import AbstractCLI
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
logger = logger_manager.get_logger(name=__name__)
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class CLIExtensionGroups:
    """CLI extension groups exposing integrations via lazy-loading properties.

    Each property imports its integration module only on first access, so
    optional dependencies are never touched unless the command is used.
    """

    @property
    def databricks(self):
        # Deferred import: Databricks extras load only when this group is used.
        from fred.integrations.databricks.cli_ext import DatabricksExt

        extension = DatabricksExt()
        return extension

    @property
    def runpod(self):
        # Deferred import: RunPod extras load only when this group is used.
        from fred.integrations.runpod.cli_ext import RunPodExt

        extension = RunPodExt()
        return extension
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
class CLI(AbstractCLI, CLIExtensionGroups):
    """Concrete `fred` CLI: lifecycle behavior from AbstractCLI plus
    lazily-loaded integration sub-commands from CLIExtensionGroups."""

    def version(self) -> str:
        """Return the installed fred-oss package version string."""
        return version.value
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
from fred.maturity import Maturity, MaturityLevel
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
# Declares this integration ALPHA quality; Maturity.__post_init__ logs a
# warning at import time unless FRD_DISABLE_MATURITY_WARN silences it.
module_maturity = Maturity(
    level=MaturityLevel.ALPHA,
    reference=__name__,
    message=(
        "Databricks integration is in early development "
        "and therefore currently with incomplete and unstable features."
    )
)
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
from typing import Optional
|
|
2
|
+
|
|
3
|
+
from fred.cli.interface import IntegrationExtCLI
|
|
4
|
+
from fred.settings import logger_manager
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
logger = logger_manager.get_logger(name=__name__)
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class DatabricksExt(IntegrationExtCLI):
    """Databricks commands exposed through the `fred databricks` CLI group."""

    def sync_runtime(self, runtime: Optional[str] = None):
        """Synchronize local runtime catalog snapshot(s) from the docs site.

        `runtime` may be None (no-op), "all", or a specific runtime name.
        """
        from fred.integrations.databricks.runtimes.sync import DatabricksRuntimeSyncHelper

        sync_helper = DatabricksRuntimeSyncHelper.default()
        # Dispatch on the requested target; "all" must be checked before the
        # generic string branch.
        if runtime is None:
            logger.info("No runtime specified; skipping synchronization.")
        elif runtime == "all":
            logger.info("Syncing all available Databricks runtimes.")
            sync_helper.sync_all()
        elif isinstance(runtime, str):
            logger.info(f"Syncing specified Databricks runtime: {runtime}")
            sync_helper.sync(runtime=runtime)
        else:
            raise ValueError(f"Invalid runtime value: {runtime}")
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
import os
|
|
2
|
+
from enum import StrEnum, auto
|
|
3
|
+
from dataclasses import dataclass
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
@dataclass(frozen=True, slots=True)
|
|
7
|
+
class DatabricksRuntime:
|
|
8
|
+
python_version: str
|
|
9
|
+
databricks_runtime: str
|
|
10
|
+
libraries: list[dict]
|
|
11
|
+
|
|
12
|
+
@classmethod
|
|
13
|
+
def from_catalog(cls, catalog: 'DatabricksRuntimeCatalog') -> 'DatabricksRuntime':
|
|
14
|
+
config = catalog.get_configuration()
|
|
15
|
+
return cls(
|
|
16
|
+
python_version=config.get("python_version", "3.12.3"),
|
|
17
|
+
databricks_runtime=config.get("databricks_runtime", "16.4 LTS"),
|
|
18
|
+
libraries=config.get("libraries", []),
|
|
19
|
+
)
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class DatabricksRuntimeCatalog(StrEnum):
|
|
23
|
+
LTS_16_4 = auto()
|
|
24
|
+
|
|
25
|
+
@property
|
|
26
|
+
def key(self) -> str:
|
|
27
|
+
runtime_type, runtime_num = self.name.split("_", 1)
|
|
28
|
+
return f"{runtime_num.replace('_', '.')} {runtime_type}"
|
|
29
|
+
|
|
30
|
+
@property
|
|
31
|
+
def url(self) -> str:
|
|
32
|
+
import posixpath
|
|
33
|
+
return posixpath.join(
|
|
34
|
+
"https://docs.databricks.com",
|
|
35
|
+
"aws/en/release-notes/runtime",
|
|
36
|
+
self.key.replace(" ", "").replace("_", "").lower(),
|
|
37
|
+
)
|
|
38
|
+
|
|
39
|
+
@classmethod
|
|
40
|
+
def from_string(cls, name: str) -> 'DatabricksRuntimeCatalog':
|
|
41
|
+
formatted_name = name.replace(" ", "").replace(".", "_").upper()
|
|
42
|
+
return cls[formatted_name]
|
|
43
|
+
|
|
44
|
+
def get_filepath(self) -> str:
|
|
45
|
+
filename = f"{self.key.replace(' ', '').replace('.', '_')}.json"
|
|
46
|
+
return os.path.join(
|
|
47
|
+
os.path.dirname(__file__),
|
|
48
|
+
"runtimes",
|
|
49
|
+
filename,
|
|
50
|
+
)
|
|
51
|
+
|
|
52
|
+
def get_configuration(self) -> dict:
|
|
53
|
+
import json
|
|
54
|
+
|
|
55
|
+
with open(self.get_filepath(), "r") as file:
|
|
56
|
+
return json.load(file)
|
|
57
|
+
|
|
58
|
+
def get_runtime(self) -> DatabricksRuntime:
|
|
59
|
+
return DatabricksRuntime.from_catalog(self)
|
|
File without changes
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
from dataclasses import dataclass
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
@dataclass(frozen=True, slots=True)
|
|
5
|
+
class DatabricksRuntimeScanner:
|
|
6
|
+
target_url: str
|
|
7
|
+
|
|
8
|
+
@classmethod
|
|
9
|
+
def default(cls) -> 'DatabricksRuntimeScanner':
|
|
10
|
+
return cls(
|
|
11
|
+
target_url="https://docs.databricks.com/aws/en/release-notes/runtime/"
|
|
12
|
+
)
|
|
13
|
+
|
|
14
|
+
def get_payload(self) -> list[dict]:
|
|
15
|
+
# TODO: Extract the runtime table as a json/dict payload
|
|
16
|
+
raise NotImplementedError
|
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import json
|
|
3
|
+
from dataclasses import dataclass
|
|
4
|
+
from typing import Union
|
|
5
|
+
|
|
6
|
+
from fred.integrations.databricks.runtime import DatabricksRuntimeCatalog
|
|
7
|
+
from fred.settings import logger_manager
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
logger = logger_manager.get_logger(name=__name__)
|
|
11
|
+
|
|
12
|
+
RUNTIME_DATATYPE = Union[DatabricksRuntimeCatalog, str]
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
@dataclass(frozen=True, slots=True)
class DatabricksRuntimeSyncHelper:
    """Fetches runtime metadata from the Databricks docs and stores it as the
    JSON files consumed by `DatabricksRuntimeCatalog.get_configuration`."""

    # Directory where the per-runtime JSON snapshots are written.
    output_path: str

    @classmethod
    def default(cls) -> 'DatabricksRuntimeSyncHelper':
        """Helper writing next to this module, under ./runtimes/."""
        return cls(
            output_path=os.path.join(
                os.path.dirname(__file__),
                "runtimes",
            )
        )

    def get_payload(self, runtime: DatabricksRuntimeCatalog) -> list[dict]:
        """Download and parse the release notes for a single runtime.

        Raises:
            ValueError: when the docs page cannot be fetched.
            NotImplementedError: parsing is not implemented yet.
        """
        # Lazy import: requests is only needed when actually syncing.
        import requests

        # Get the runtime content from the official documentation
        response = requests.get(runtime.url)
        if not response.ok:
            raise ValueError(f"Failed to fetch runtime information from {runtime.url}")
        content = response.text

        # Parse the content to extract relevant information
        # TODO: Implement the actual parsing logic
        raise NotImplementedError

    def sync(self, runtime: RUNTIME_DATATYPE):
        """Sync one runtime (accepts a catalog member or its string form)."""
        if isinstance(runtime, str):
            runtime = DatabricksRuntimeCatalog.from_string(runtime)
        logger.info(f"Syncing Databricks Runtime: {runtime.name}")
        payload = self.get_payload(runtime=runtime)
        # BUG FIX: the filename must match DatabricksRuntimeCatalog.get_filepath
        # (key-derived, e.g. "16_4LTS.json"); the original used the enum *name*
        # ("LTS_16_4.json"), so synced files were never found when loading the
        # configuration back.
        output_filepath = os.path.join(
            self.output_path,
            f"{runtime.key.replace(' ', '').replace('.', '_')}.json",
        )
        with open(output_filepath, "w") as file:
            json.dump(payload, file, indent=4)

    def sync_all(self):
        """Sync every runtime in the catalog."""
        for runtime in DatabricksRuntimeCatalog:
            self.sync(runtime=runtime)
|
|
File without changes
|
|
@@ -0,0 +1,145 @@
|
|
|
1
|
+
from dataclasses import dataclass, field
|
|
2
|
+
from functools import wraps
|
|
3
|
+
from typing import Any, Callable, Optional
|
|
4
|
+
|
|
5
|
+
from fred.settings import logger_manager
|
|
6
|
+
|
|
7
|
+
logger = logger_manager.get_logger(name=__name__)
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
# Note: We need to turn off frozen & slots here to allow dynamic attribute setting
# by the wraps decorator
@dataclass(frozen=False, slots=False)
class DBUtilsRetrievalMethod:
    """Callable wrapper tagging a dbutils-retrieval function with a priority.

    `DBUtilsFinder` discovers instances by attribute-name prefix and tries
    them in ascending `position` order.
    """

    # Ordering rank: lower positions are attempted first.
    position: int
    # The wrapped retrieval function; invoked through __call__.
    function: Callable

    @classmethod
    def with_position(cls, value: int) -> Callable:
        """Decorator factory: wrap a function in an instance carrying `value`.

        `wraps` copies the function's metadata onto the instance, which is
        why the dataclass cannot be frozen/slotted (see note above).
        """
        def decorator(function: Callable) -> 'DBUtilsRetrievalMethod':
            instance = cls(position=value, function=function)
            wraps(function)(instance)
            return instance
        return decorator

    def __call__(self, *args, **kwargs) -> Callable:
        # Delegate straight to the wrapped function.
        return self.function(*args, **kwargs)
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
@dataclass(frozen=False, slots=True)
class DBUtilsFinder:
    """Resolves a `dbutils` handle by trying registered retrieval strategies
    (methods decorated with `DBUtilsRetrievalMethod.with_position`) in rank
    order, caching the first success on the instance."""

    # Cached handle; populated lazily by `get`.
    instance: Optional[Any] = None


    @staticmethod
    @DBUtilsRetrievalMethod.with_position(value=0)
    def get_dbutils_from_python_context(**kwargs):
        """Strategy 0: reuse a `dbutils` already bound in the interpreter."""
        # NOTE(review): locals()/globals() here are this *function's* scopes,
        # not the notebook's, so this only succeeds when `dbutils` is a
        # global of this module -- confirm this strategy ever fires.
        try:
            return (
                locals().get("dbutils")
                or globals().get("dbutils")
            )
        except Exception as e:
            logger.warning(f"Failed to get dbutils from python context: {e}")
            return None

    @staticmethod
    @DBUtilsRetrievalMethod.with_position(value=1)
    def get_dbutils_from_sdk_context(**kwargs):
        """Strategy 1: the Databricks SDK runtime export (legacy fallback)."""
        try:
            from databricks.sdk.runtime import dbutils
            return dbutils
        except ImportError:
            # NOTE(review): if this fallback import also fails, the error
            # propagates -- the sibling `except Exception` does not catch it.
            from databricks.sdk.runtime import get_dbutils
            return get_dbutils()
        except Exception as e:
            logger.warning(f"Failed to get dbutils from SDK context: {e}")
            return None

    @staticmethod
    @DBUtilsRetrievalMethod.with_position(value=2)
    def get_dbutils_from_workspace_client(client: Optional['WorkspaceClient'] = None, **kwargs):
        """Strategy 2: a (possibly freshly created) WorkspaceClient."""
        try:
            if client is None:
                from databricks.sdk import WorkspaceClient
                client = WorkspaceClient()
            return client.dbutils
        except Exception as e:
            logger.warning(f"Failed to get dbutils from workspace client: {e}")
            return None

    @staticmethod
    @DBUtilsRetrievalMethod.with_position(value=3)
    def get_dbutils_from_pypspark_import(spark: Optional['SparkSession'] = None, **kwargs):
        """Strategy 3: pyspark's DBUtils bound to the given Spark session."""
        try:
            from pyspark.dbutils import DBUtils  # type: ignore[import]
            return DBUtils(spark)
        except Exception as e:
            logger.warning(f"Failed to get dbutils from pyspark context: {e}")
            return None

    @staticmethod
    @DBUtilsRetrievalMethod.with_position(value=4)
    def get_dbutils_from_jvm_context(spark: Optional['SparkSession'] = None, **kwargs):
        """Strategy 4: the JVM-side dbutils of a Spark session (last resort)."""
        try:
            if spark is None:
                from pyspark.sql import SparkSession
                spark = SparkSession.builder.getOrCreate()
            return spark._jvm.dbutils
        except Exception as e:
            logger.warning(f"Failed to get dbutils from JVM context: {e}")
            return None

    @classmethod
    def find_dbutils(
        cls,
        spark: Optional['SparkSession'] = None,
        client: Optional['WorkspaceClient'] = None,
        start_with_index: int = 0,
    ):
        """Try each retrieval strategy in position order; return the first hit.

        Args:
            spark: Optional Spark session forwarded to strategies.
            client: Optional workspace client forwarded to strategies.
            start_with_index: skip strategies ranked before this index.

        Returns:
            A dbutils handle, or None when every strategy fails.
        """
        get_dbutils_methods = sorted(
            [
                method
                for method in dir(cls)
                if method.startswith("get_dbutils_from_")
            ],
            key=lambda m: getattr(cls, m).position
        )
        # BUG FIX: the original recursive walk advanced both by slicing the
        # remaining list AND by incrementing the index, which skipped every
        # other strategy after the first attempt. A plain loop tries each
        # strategy exactly once.
        for method_name in get_dbutils_methods[start_with_index:]:
            if (dbutils := getattr(cls, method_name)(spark=spark, client=client)):
                return dbutils
        return None

    def get(self, **kwargs):
        """Return the cached handle, resolving and caching it on first use."""
        if self.instance:
            return self.instance
        self.instance = self.find_dbutils(**kwargs)
        return self.instance
|
|
126
|
+
|
|
127
|
+
|
|
128
|
+
@dataclass(frozen=True, slots=True)
class DBUtilsWrapper:
    """Thin immutable holder for a resolved dbutils handle."""

    # The underlying dbutils object (None when resolution failed).
    instance: Optional[Any] = None

    @classmethod
    def auto(
        cls,
        spark: Optional['SparkSession'] = None,
        client: Optional['WorkspaceClient'] = None,
        start_with_index: int = 0,
    ) -> 'DBUtilsWrapper':
        """Resolve dbutils via DBUtilsFinder and wrap the result.

        All arguments are forwarded to `DBUtilsFinder.find_dbutils`.
        """
        return cls(
            instance=DBUtilsFinder.find_dbutils(
                spark=spark,
                client=client,
                start_with_index=start_with_index,
            )
        )
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
from fred.maturity import Maturity, MaturityLevel
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
# Declares this integration ALPHA quality; Maturity.__post_init__ logs a
# warning at import time unless FRD_DISABLE_MATURITY_WARN silences it.
module_maturity = Maturity(
    level=MaturityLevel.ALPHA,
    reference=__name__,
    message=(
        "Runpod integration is in early development "
        "and therefore currently with incomplete and unstable features."
    )
)
|
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
from fred.integrations.runpod.helper import HandlerHelper
|
|
2
|
+
from fred.cli.interface import IntegrationExtCLI
|
|
3
|
+
from fred.settings import logger_manager
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
logger = logger_manager.get_logger(name=__name__)
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class RunPodExt(IntegrationExtCLI):
    """RunPod serverless commands for the `fred runpod` CLI group."""

    def get_handler_instance(self, import_pattern: str, handler_classname: str) -> HandlerHelper:
        """Resolve and instantiate the requested handler class."""
        return HandlerHelper.find_handler(
            import_pattern=import_pattern,
            handler_classname=handler_classname,
        )

    def execute_local(self, import_pattern: str, handler_classname: str, **kwargs) -> dict:
        """Run the handler once, in-process, with a synthetic event."""
        payload = kwargs.pop("payload", {})
        handler = self.get_handler_instance(
            import_pattern=import_pattern,
            handler_classname=handler_classname,
        )
        synthetic_event = {
            "id": "local-exec",
            "payload": payload
        }
        return handler.run(event=synthetic_event)

    def execute(self, import_pattern: str, handler_classname: str, local: bool = False, **kwargs):
        """Start the RunPod serverless loop, or run once locally when `local`."""
        # Early exit and redirect to local execution when specified.
        if local:
            return self.execute_local(import_pattern, handler_classname, **kwargs)

        # Lazy import to avoid dependency issues when not using RunPod
        import runpod  # type: ignore

        logger.info(f"Starting RunPod serverless with handler '{handler_classname}' from '{import_pattern}'.")
        handler = self.get_handler_instance(
            import_pattern=import_pattern,
            handler_classname=handler_classname,
        )
        serverless_config = {
            "handler": handler.run,
            **kwargs,
        }
        runpod.serverless.start(serverless_config)
|
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
import time
|
|
2
|
+
from dataclasses import dataclass
|
|
3
|
+
from typing import Optional
|
|
4
|
+
|
|
5
|
+
from fred.settings import logger_manager
|
|
6
|
+
|
|
7
|
+
logger = logger_manager.get_logger(name=__name__)
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
@dataclass(frozen=True, slots=True)
class HandlerHelper:
    """Base class for RunPod job handlers.

    Subclasses override `handler`; `run` wraps it with timing and error
    capture and returns a uniform response envelope.
    """

    @classmethod
    def find_handler(cls, import_pattern: str, handler_classname: str) -> 'HandlerHelper':
        """Dynamically import `handler_classname` from `import_pattern` and
        instantiate it.

        Raises:
            ValueError: when the class is missing from the module or is not
                a subclass of HandlerHelper.
        """
        import importlib

        # Dynamically import the handler class
        handler_module = importlib.import_module(import_pattern)
        # BUG FIX: default to None so a missing attribute reaches the
        # explicit error below instead of raising a bare AttributeError
        # before the logging/ValueError path could ever run.
        handler_cls = getattr(handler_module, handler_classname, None)
        # Ensure the handler class exists and is a subclass of HandlerHelper
        if not handler_cls or not issubclass(handler_cls, cls):
            logger.error(f"Handler class '{handler_classname}' not found or is not a subclass of HandlerHelper.")
            raise ValueError(f"Handler '{handler_classname}' not found in module '{import_pattern}' or is not a subclass of HandlerHelper.")
        return handler_cls()

    def handler(self, payload: dict) -> Optional[dict]:
        """Override point: process `payload` and return a result dict.

        The default implementation just echoes the payload with a warning.
        """
        logger.warning("Handler method not implemented.")
        return payload

    def run(self, event: dict) -> dict:
        """Execute the handler for one event; never raises.

        Returns:
            An envelope dict with the event id, duration in seconds, the
            handler response (or {"error": ...} on failure), and an `ok` flag.
        """
        job_event_identifier = event.get("id")
        payload = event.get("payload", {})
        start_time = time.perf_counter()
        ok = True
        try:
            response = self.handler(payload=payload)
        except Exception as e:
            ok = False
            logger.error(f"Error processing event {job_event_identifier}: {e}")
            response = {
                "error": str(e)
            }
        return {
            "id": job_event_identifier,
            "duration": time.perf_counter() - start_time,
            "response": response,
            "ok": ok,
        }
|
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
import enum
|
|
2
|
+
from dataclasses import dataclass
|
|
3
|
+
from typing import Optional
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
from fred.settings import (
|
|
7
|
+
logger_manager,
|
|
8
|
+
get_environ_variable,
|
|
9
|
+
)
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
logger = logger_manager.get_logger(name=__name__)
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class MaturityLevel(enum.Enum):
    """Lifecycle stages for package functionality.

    Each member's value doubles as a human-readable description (the string
    pairs below are implicitly concatenated).
    """

    ALPHA = (
        "Alpha Maturity Level"
        ": Initial development stage with potential instability and limited or unreliable features."
    )
    BETA = (
        "Beta Maturity Level"
        ": More stable than Alpha, with additional features and ongoing testing."
    )
    STABLE = (
        "Stable Maturity Level"
        ": Fully tested and mostly reliable, suitable for production use, but use at your own risk!"
    )
    TO_BE_DEPRECATED = (
        "To Be Deprecated Maturity Level"
        ": Functionality is planned to be removed in future releases. Consider alternatives."
    )
    DEPRECATED = (
        "Deprecated Maturity Level"
        ": Functionality is no longer supported and may be removed in future releases."
    )
    REMOVED = (
        "Removed Maturity Level"
        ": Functionality has been removed from the codebase and is no longer available."
    )

    def is_stable(self) -> bool:
        """Only STABLE counts as stable."""
        return self == MaturityLevel.STABLE

    def is_unstable(self) -> bool:
        """Pre-stable levels plus those scheduled for removal."""
        return self in {MaturityLevel.ALPHA, MaturityLevel.BETA, MaturityLevel.TO_BE_DEPRECATED}

    def is_deprecated(self) -> bool:
        """Levels past end-of-support (DEPRECATED or REMOVED)."""
        return self in {MaturityLevel.DEPRECATED, MaturityLevel.REMOVED}
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
@dataclass(frozen=True, slots=True)
class Maturity:
    """Declares the maturity of a module/feature and logs a warning at
    construction time when the level is not STABLE (unless silenced)."""

    # The lifecycle stage being declared.
    level: MaturityLevel
    # Dotted name (usually __name__) of what this maturity describes.
    reference: Optional[str] = None
    # Extra context appended to the warning message.
    message: Optional[str] = None


    def __post_init__(self):
        # Warn for anything that is not STABLE.
        # NOTE(review): DEPRECATED/REMOVED levels also warn here because they
        # are not "stable" -- confirm that is the intended behavior.
        if not self.level.is_stable() and not self.quiet:
            logger.warning(
                "Functionality for {reference} is at {level} maturity level. {message}".format(
                    reference=self.reference or "undefined",
                    level=self.level.name,
                    message=self.message or ""
                )
            )

    @property
    def quiet(self) -> bool:
        """True when FRD_DISABLE_MATURITY_WARN holds a truthy flag value."""
        return get_environ_variable(
            name="FRD_DISABLE_MATURITY_WARN",
            default="0",
        ).upper() in ["1", "Y", "T", "TRUE", "YES"]
|
|
@@ -0,0 +1,142 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import enum
|
|
3
|
+
import logging
|
|
4
|
+
from typing import Callable, Dict, Literal, NoReturn, Optional, Union, TypeVar, overload
|
|
5
|
+
from logging.config import dictConfig
|
|
6
|
+
|
|
7
|
+
T = TypeVar("T")


@overload
def get_environ_variable(
    name: str,
    default: Literal[None] = None,
    enforce: bool = False,
    apply: Optional[Callable[[Optional[str]], T]] = None,
) -> Optional[Union[str, T]]:
    ...


@overload
def get_environ_variable(
    name: str,
    default: str,
    enforce: bool = False,
    apply: Optional[Callable[[str], T]] = None,
) -> Union[str, T]:
    ...


def get_environ_variable(
    name: str,
    default: Optional[str] = None,
    enforce: bool = False,
    apply: Optional[Union[Callable[[Optional[str]], T], Callable[[str], T]]] = None,
) -> Optional[Union[Optional[str], T]]:
    """Read an environment variable with optional default, enforcement, and
    post-processing.

    Args:
        name: Environment variable name.
        default: Fallback when the variable is unset (ignored when
            `enforce` is True).
        enforce: When True, raise if the variable is unset or empty
            (an empty string counts as missing, preserving the original
            truthiness-based behavior).
        apply: Optional transform applied to the resolved value (it may
            receive None when the variable is unset and no default is given).

    Returns:
        The resolved (and possibly transformed) value.

    Raises:
        ValueError: when `enforce` is True and the variable is unset/empty.
    """
    # Readability fix: the original expressed the enforce path as a
    # generator-throw lambda one-liner; this is the same behavior spelled out.
    if enforce:
        value = os.environ.get(name)
        if not value:
            raise ValueError(f"Missing environ variable: {name}")
    else:
        value = os.environ.get(name, default=default)
    return apply(value) if apply else value
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
# Environment variables for the OpenAI API
# The API key is optional at import time (enforce=False), so it may be None;
# features that need it must handle the missing case.
FRD_OPENAI_API_KEY = get_environ_variable(
    name="FRD_OPENAI_API_KEY",
    enforce=False,
)
# Base URL defaults to the public OpenAI endpoint.
FRD_OPENAI_BASE_URL = get_environ_variable(
    name="FRD_OPENAI_BASE_URL",
    default="https://api.openai.com/v1"
)
# Model identifier used when callers do not specify one explicitly.
FRD_OPENAI_DEFAULT_MODEL = get_environ_variable(
    "FRD_OPENAI_DEFAULT_MODEL",
    default="openai/gpt-oss-20b"
)
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
# Logger configuration
# Default `logging.config.dictConfig` schema: a single stream handler on the
# root logger. Format and levels are overridable via environment variables.
default_logger_configuration = {
    "version": 1,
    "disable_existing_loggers": False,
    "formatters": {
        "standard": {
            # Record format, e.g. "2025-01-01 00:00:00 [INFO] name: message".
            "format": get_environ_variable(
                name="DEFAULT_PYTHON_LOGGER",
                default="%(asctime)s [%(levelname)s] %(name)s: %(message)s"
            )
        },
    },
    "handlers": {
        "default": {
            # Handler-level threshold (independent of the logger level below).
            "level": get_environ_variable(
                name="DEFAULT_PYTHON_HANDLER_LEVEL",
                default="INFO"
            ),
            "formatter": "standard",
            "class": "logging.StreamHandler",
        },
    },
    "loggers": {
        # Root logger ("" name): every logger propagates to this handler.
        "": {
            "handlers": [
                "default"
            ],
            "level": get_environ_variable(
                name="DEFAULT_PYTHON_LOGGER_LEVEL",
                default="INFO",
            ),
            "propagate": True
        }
    }
}
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
def configure_logger_callable(config_dictionary: Optional[Dict] = None) -> Callable:
    """Build a logger factory bound to a dictConfig-style configuration.

    Args:
        config_dictionary: Logging configuration to apply; falls back to the
            module-level ``default_logger_configuration`` when falsy.

    Returns:
        A callable that applies the configuration and returns the named logger.
    """
    def _factory(name: str):
        # Re-apply the configuration on every call, mirroring dictConfig semantics.
        dictConfig(config_dictionary or default_logger_configuration)
        return logging.getLogger(name)

    return _factory
|
|
100
|
+
|
|
101
|
+
|
|
102
|
+
class LoggerManager:
    """Singleton-style manager that applies a dictConfig and hands out loggers."""

    class _Singleton:
        # Shared singleton registry: number of instances registered so far and
        # the object currently acting as the singleton value.
        count: int = 0
        value: Optional['LoggerManager'] = None

    def __new__(cls, *args, **kwargs):
        # First construction: create, register, and return a fresh instance.
        if cls._Singleton.count == 0:
            instance = super(LoggerManager, cls).__new__(cls)
            cls._Singleton.count += 1
            cls._Singleton.value = instance
            return instance
        # Subsequent constructions return the already-registered instance.
        # NOTE(review): __init__ still runs on the returned object, so
        # config_dictionary is overwritten on every call — confirm intended.
        return cls._Singleton.value

    def __init__(self, config_dictionary: Optional[Dict] = None, disable_singleton: bool = False):
        # Store the effective configuration (falls back to the module default).
        self.config_dictionary: Dict = config_dictionary or default_logger_configuration

        # With singleton behavior enabled and an instance already registered,
        # skip re-registration (but the config assignment above already ran).
        if not disable_singleton and self._Singleton.count > 0:
            return
        self._Singleton.count += 1
        self._Singleton.value = self

    def set_configuration(self, **kwargs):
        # Replace the stored configuration wholesale with the given kwargs.
        self.config_dictionary = kwargs

    def get_logger(self, name: str, overwrite_config_dictionary: Optional[Dict] = None):
        # Apply the (possibly overridden) configuration, then return the named logger.
        dictConfig(overwrite_config_dictionary or self.config_dictionary)
        return logging.getLogger(name)

    @classmethod
    def singleton(cls, overwrite_config_dictionary: Optional[Dict] = None) -> 'LoggerManager':
        """Return the shared instance, rebuilding it when a config override is given."""
        if overwrite_config_dictionary or cls._Singleton.value is None:
            cls._Singleton.value = cls(overwrite_config_dictionary)
            # count > 1 means more than one registration happened; warn about it.
            if cls._Singleton.count > 1:
                cls._Singleton.value.get_logger(name=__name__).warning("Creating a new logger manager instance.")
        return cls._Singleton.value
|
|
137
|
+
|
|
138
|
+
|
|
139
|
+
# Module-level singleton used across the package for logging.
logger_manager = LoggerManager.singleton()

# Warn at import time when OpenRouter is configured with a paid (non-":free") model.
if "openrouter" in FRD_OPENAI_BASE_URL and not FRD_OPENAI_DEFAULT_MODEL.endswith(":free"):
    logger_manager.get_logger(__name__).warning("Using OpenRouter with a non-free model.")
|
|
File without changes
|
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
import datetime as dt
|
|
2
|
+
from dataclasses import dataclass, asdict
|
|
3
|
+
|
|
4
|
+
from fred.utils.dateops import datetime_utcnow
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
@dataclass(frozen=True, slots=True)
|
|
8
|
+
class RuntimeProfilingSnapshot:
|
|
9
|
+
snapshot_at: dt.datetime
|
|
10
|
+
cpu_percent: float
|
|
11
|
+
virtual_memory_percent: float
|
|
12
|
+
swap_memory_percent: float
|
|
13
|
+
disk_usage_percent: float
|
|
14
|
+
|
|
15
|
+
@classmethod
|
|
16
|
+
def auto(cls) -> 'RuntimeProfilingSnapshot':
|
|
17
|
+
import psutil
|
|
18
|
+
|
|
19
|
+
return cls(
|
|
20
|
+
snapshot_at=datetime_utcnow(),
|
|
21
|
+
cpu_percent=psutil.cpu_percent(interval=1),
|
|
22
|
+
virtual_memory_percent=psutil.virtual_memory().percent,
|
|
23
|
+
swap_memory_percent=psutil.swap_memory().percent,
|
|
24
|
+
disk_usage_percent=psutil.disk_usage("/").percent,
|
|
25
|
+
)
|
|
26
|
+
|
|
27
|
+
def to_dict(self) -> dict:
|
|
28
|
+
payload = asdict(self)
|
|
29
|
+
payload["snapshot_at"] = self.snapshot_at.isoformat()
|
|
30
|
+
return payload
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
@dataclass(frozen=True, slots=True)
class RuntimeInfo:
    """Static description of the interpreter/host plus collected profiling samples."""
    snapshot_at: dt.datetime
    python_version: str
    platform: str
    processor: str
    modules: list[str]
    profiling_snapshots: list[RuntimeProfilingSnapshot]

    @classmethod
    def auto(cls, exclude_initial_profile: bool = False) -> 'RuntimeInfo':
        """Collect interpreter/platform details, optionally with a first profiling sample."""
        import platform

        return cls(
            python_version=platform.python_version(),
            platform=platform.platform(),
            processor=platform.processor(),
            modules=sorted(cls.get_modules()),
            snapshot_at=datetime_utcnow(),
            profiling_snapshots=(
                [] if exclude_initial_profile else [RuntimeProfilingSnapshot.auto()]
            ),
        )

    def append_profiling_snapshot(self):
        """Record one more profiling sample (the list stays mutable on a frozen dataclass)."""
        sample = RuntimeProfilingSnapshot.auto()
        self.profiling_snapshots.append(sample)

    @staticmethod
    def get_modules():
        """Return the set of importable module names (discoverable plus built-in)."""
        import sys
        import pkgutil

        discovered = {entry.name for entry in pkgutil.iter_modules()}
        return discovered.union(sys.builtin_module_names)

    def to_dict(
        self,
        exclude_modules: bool = False,
        exclude_profiling_snapshots: bool = False,
    ) -> dict:
        """Serialize to a JSON-friendly dict; timestamps become ISO-8601 strings.

        Args:
            exclude_modules: Drop the (potentially long) module list from the payload.
            exclude_profiling_snapshots: Drop the profiling samples from the payload.
        """
        payload = asdict(self)
        payload["snapshot_at"] = self.snapshot_at.isoformat()
        payload["profiling_snapshots"] = [
            entry.to_dict() for entry in self.profiling_snapshots
        ]
        if exclude_modules:
            payload.pop("modules", None)
        if exclude_profiling_snapshots:
            payload.pop("profiling_snapshots", None)
        return payload
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
0.1.0
|
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
import os
|
|
2
|
+
from typing import Dict, List, Optional
|
|
3
|
+
from dataclasses import dataclass
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
@dataclass(frozen=True, slots=True)
|
|
7
|
+
class Version:
|
|
8
|
+
name: str
|
|
9
|
+
value: str
|
|
10
|
+
|
|
11
|
+
def get_upcoming_options(self) -> List['Version']:
|
|
12
|
+
vals = self.components(as_int=True)
|
|
13
|
+
return [
|
|
14
|
+
Version(
|
|
15
|
+
name=self.name,
|
|
16
|
+
value=".".join(
|
|
17
|
+
str(v)
|
|
18
|
+
for j, v in enumerate(vals[:i] + [val+1] + (2-i)*[0])
|
|
19
|
+
),
|
|
20
|
+
)
|
|
21
|
+
for i, val in enumerate(vals)
|
|
22
|
+
]
|
|
23
|
+
|
|
24
|
+
def get_upcoming_options_with_tag(self) -> Dict[str, Dict]:
|
|
25
|
+
tags = ["major", "minor", "patch"]
|
|
26
|
+
return {
|
|
27
|
+
version.value: {
|
|
28
|
+
"version": version,
|
|
29
|
+
"scope": tags[index],
|
|
30
|
+
}
|
|
31
|
+
for index, version in enumerate(self.get_upcoming_options())
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
def upcoming(
|
|
35
|
+
self,
|
|
36
|
+
major: bool = False,
|
|
37
|
+
minor: bool = False,
|
|
38
|
+
patch: bool = False,
|
|
39
|
+
) -> 'Version':
|
|
40
|
+
ver_maj, ver_min, ver_pat = self.get_upcoming_options()
|
|
41
|
+
if sum([major, minor, patch]) != 1:
|
|
42
|
+
raise ValueError
|
|
43
|
+
return ver_maj if major else ver_min if minor else ver_pat
|
|
44
|
+
|
|
45
|
+
def components(self, as_int: bool = False) -> list:
|
|
46
|
+
return [int(val) if as_int else val for val in self.value.split(".")]
|
|
47
|
+
|
|
48
|
+
@property
|
|
49
|
+
def major(self) -> int:
|
|
50
|
+
component, *_ = self.components(as_int=True)
|
|
51
|
+
return component
|
|
52
|
+
|
|
53
|
+
@property
|
|
54
|
+
def minor(self) -> int:
|
|
55
|
+
_, component, *_ = self.components(as_int=True)
|
|
56
|
+
return component
|
|
57
|
+
|
|
58
|
+
@property
|
|
59
|
+
def patch(self) -> int:
|
|
60
|
+
*_, component = self.components(as_int=True)
|
|
61
|
+
return component
|
|
62
|
+
|
|
63
|
+
@classmethod
|
|
64
|
+
def from_path(cls, dirpath: str, name: str):
|
|
65
|
+
for file in os.listdir(dirpath):
|
|
66
|
+
if file.lower().endswith("version"):
|
|
67
|
+
filepath = os.path.join(dirpath, file)
|
|
68
|
+
break
|
|
69
|
+
else:
|
|
70
|
+
raise ValueError("Version file not found for package name: " + name)
|
|
71
|
+
|
|
72
|
+
with open(filepath, "r") as version_file:
|
|
73
|
+
version_value = version_file.readline().strip() # TODO: Validate version pattern via regex
|
|
74
|
+
return cls(name=name, value=version_value)
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
# Package-wide version, parsed at import time from the adjacent "version" file.
version = Version.from_path(name="fred", dirpath=os.path.dirname(__file__))
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: fred-oss
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: FREDOSS
|
|
5
|
+
Home-page: https://fred.fahera.mx
|
|
6
|
+
Author: Fahera Research, Education, and Development
|
|
7
|
+
Author-email: fred@fahera.mx
|
|
8
|
+
Requires-Python: >=3.12
|
|
9
|
+
Description-Content-Type: text/markdown
|
|
10
|
+
License-File: NOTICE.txt
|
|
11
|
+
Requires-Dist: fire==0.7.1
|
|
12
|
+
Dynamic: author
|
|
13
|
+
Dynamic: author-email
|
|
14
|
+
Dynamic: description
|
|
15
|
+
Dynamic: description-content-type
|
|
16
|
+
Dynamic: home-page
|
|
17
|
+
Dynamic: license-file
|
|
18
|
+
Dynamic: requires-dist
|
|
19
|
+
Dynamic: requires-python
|
|
20
|
+
Dynamic: summary
|
|
21
|
+
|
|
22
|
+
# FREDOSS
|
|
23
|
+
|
|
24
|
+
This is the open-source baseline Python package, `fred`, by `fred.fahera.mx` (Fahera's Research, Education, and Development Team).
|
|
25
|
+
|
|
26
|
+
## Installation
|
|
27
|
+
|
|
28
|
+
```
|
|
29
|
+
$ pip install fred-oss
|
|
30
|
+
```
|
|
31
|
+
|
|
32
|
+
By default, the `fred-oss` package will only install the `default` dependencies. You can control which
|
|
33
|
+
dependency set to use via the 'dependency tags' via the following pattern:
|
|
34
|
+
|
|
35
|
+
```
|
|
36
|
+
$ pip install 'fred-oss[<tag-1>,<tag-2>,...]'
|
|
37
|
+
```
|
|
38
|
+
|
|
39
|
+
Where `<tag-i>` can be:
|
|
40
|
+
* `default`
|
|
41
|
+
* `all`
|
|
42
|
+
* ...
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
MANIFEST.in
|
|
2
|
+
NOTICE.txt
|
|
3
|
+
README.md
|
|
4
|
+
requirements.txt
|
|
5
|
+
setup.py
|
|
6
|
+
src/main/fred/maturity.py
|
|
7
|
+
src/main/fred/settings.py
|
|
8
|
+
src/main/fred/version
|
|
9
|
+
src/main/fred/version.py
|
|
10
|
+
src/main/fred/cli/__init__.py
|
|
11
|
+
src/main/fred/cli/__main__.py
|
|
12
|
+
src/main/fred/cli/interface.py
|
|
13
|
+
src/main/fred/cli/main.py
|
|
14
|
+
src/main/fred/integrations/databricks/__init__.py
|
|
15
|
+
src/main/fred/integrations/databricks/cli_ext.py
|
|
16
|
+
src/main/fred/integrations/databricks/runtime.py
|
|
17
|
+
src/main/fred/integrations/databricks/runtimes/__init__.py
|
|
18
|
+
src/main/fred/integrations/databricks/runtimes/scanner.py
|
|
19
|
+
src/main/fred/integrations/databricks/runtimes/sync.py
|
|
20
|
+
src/main/fred/integrations/databricks/wrappers/__init__.py
|
|
21
|
+
src/main/fred/integrations/databricks/wrappers/dbutils.py
|
|
22
|
+
src/main/fred/integrations/runpod/__init__.py
|
|
23
|
+
src/main/fred/integrations/runpod/cli_ext.py
|
|
24
|
+
src/main/fred/integrations/runpod/helper.py
|
|
25
|
+
src/main/fred/utils/__init__.py
|
|
26
|
+
src/main/fred/utils/dateops.py
|
|
27
|
+
src/main/fred/utils/runtime.py
|
|
28
|
+
src/main/fred_oss.egg-info/PKG-INFO
|
|
29
|
+
src/main/fred_oss.egg-info/SOURCES.txt
|
|
30
|
+
src/main/fred_oss.egg-info/dependency_links.txt
|
|
31
|
+
src/main/fred_oss.egg-info/entry_points.txt
|
|
32
|
+
src/main/fred_oss.egg-info/requires.txt
|
|
33
|
+
src/main/fred_oss.egg-info/top_level.txt
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
fire==0.7.1
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
fred
|