satif-ai 0.2.8__py3-none-any.whl → 0.2.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- satif_ai/__init__.py +19 -0
- satif_ai/adapters/tidy.py +19 -38
- satif_ai/standardize.py +112 -0
- satif_ai/standardizers/ai.py +485 -0
- satif_ai/standardizers/ai_csv.py +1 -1
- satif_ai/transform.py +121 -0
- satif_ai/{code_builders/transformation.py → transformation_builders/syncpulse.py} +22 -29
- satif_ai/utils/__init__.py +5 -0
- satif_ai/utils/merge_sdif.py +22 -0
- satif_ai/utils/openai_mcp.py +97 -0
- satif_ai/utils/zip.py +120 -0
- {satif_ai-0.2.8.dist-info → satif_ai-0.2.9.dist-info}/METADATA +4 -3
- satif_ai-0.2.9.dist-info/RECORD +19 -0
- satif_ai/code_builders/adaptation.py +0 -9
- satif_ai-0.2.8.dist-info/RECORD +0 -13
- /satif_ai/{code_builders → transformation_builders}/__init__.py +0 -0
- {satif_ai-0.2.8.dist-info → satif_ai-0.2.9.dist-info}/LICENSE +0 -0
- {satif_ai-0.2.8.dist-info → satif_ai-0.2.9.dist-info}/WHEEL +0 -0
- {satif_ai-0.2.8.dist-info → satif_ai-0.2.9.dist-info}/entry_points.txt +0 -0
satif_ai/{code_builders/transformation.py → transformation_builders/syncpulse.py}
@@ -8,7 +8,9 @@ from typing import Any, Dict, List, Optional, Union
 from agents import Agent, Runner, function_tool
 from agents.mcp.server import MCPServer
 from mcp import ClientSession
-from satif_core import
+from satif_core import AsyncTransformationBuilder
+from satif_core.types import FilePath
+from satif_sdk.code_executors.local_executor import LocalCodeExecutor
 from satif_sdk.comparators import get_comparator
 from satif_sdk.representers import get_representer
 from satif_sdk.transformers import CodeTransformer
@@ -61,7 +63,10 @@ async def execute_transformation(code: str) -> str:
     if INPUT_SDIF_PATH is None or OUTPUT_TARGET_FILES is None:
         return "Error: Transformation context not initialized"

-    code_transformer = CodeTransformer(
+    code_transformer = CodeTransformer(
+        function=code,
+        code_executor=LocalCodeExecutor(disable_security_warning=True),
+    )
     generated_output_path = code_transformer.export(INPUT_SDIF_PATH)

     comparisons = []
@@ -120,19 +125,7 @@ async def execute_transformation(code: str) -> str:
     return "\n".join(comparisons)


-class
-    def __init__(self, output_example: Path | List[Path] | Dict[str, Path]):
-        self.output_example = output_example
-
-    def build(
-        self,
-        sdif: Path | SDIFDatabase,
-        instructions: Optional[str] = None,
-    ) -> str:
-        pass
-
-
-class TransformationAsyncCodeBuilder(AsyncCodeBuilder):
+class SyncpulseTransformationBuilder(AsyncTransformationBuilder):
     """This class is used to build a transformation code that will be used to transform a SDIF database into a set of files following the format of the given output files."""

     def __init__(
@@ -147,23 +140,18 @@ class TransformationAsyncCodeBuilder(AsyncCodeBuilder):

     async def build(
         self,
-        sdif: Path,
-        output_target_files: Dict[
-        output_sdif: Optional[Path] = None,
+        sdif: Path,
+        output_target_files: Dict[FilePath, str] | List[FilePath] | FilePath,
+        output_sdif: Optional[Path] = None,
         instructions: str = "",
         schema_only: bool = False,
-
+        representer_kwargs: Optional[Dict[str, Any]] = None,
     ) -> str:
         global INPUT_SDIF_PATH, OUTPUT_TARGET_FILES, SCHEMA_ONLY
-
-        # If execute_transformation runs in the same process as the builder, absolute path is fine.
-        # If it were a separate context, this might need adjustment.
-        # For now, assume execute_transformation can access absolute paths if needed for its *input SDIF*.
-        # However, the sdif for MCP URIs must be relative.
+
         INPUT_SDIF_PATH = Path(sdif).resolve()
         SCHEMA_ONLY = schema_only
-        #
-        # So, use them directly as strings.
+        # We must encode the path because special characters are not allowed in mcp read_resource()
         input_sdif_mcp_uri_path = base64.b64encode(str(sdif).encode()).decode()
         output_sdif_mcp_uri_path = (
             base64.b64encode(str(output_sdif).encode()).decode()
@@ -205,9 +193,14 @@ class TransformationAsyncCodeBuilder(AsyncCodeBuilder):

         # OUTPUT_TARGET_FILES keys are absolute paths to original example files for local reading by representers/comparators.
         # Values are agent-facing filenames.
-        if isinstance(output_target_files,
+        if isinstance(output_target_files, FilePath):
+            OUTPUT_TARGET_FILES = {
+                Path(output_target_files).resolve(): Path(output_target_files).name
+            }
+        elif isinstance(output_target_files, list):
             OUTPUT_TARGET_FILES = {
-                file_path.resolve(): file_path.name
+                Path(file_path).resolve(): Path(file_path).name
+                for file_path in output_target_files
             }
         elif isinstance(output_target_files, dict):
             temp_map = {}
@@ -229,7 +222,7 @@ class TransformationAsyncCodeBuilder(AsyncCodeBuilder):
             # Representer uses the absolute path (file_key_abs_path) to read the example file.
             representer = get_representer(file_key_abs_path)
             representation, used_params = representer.represent(
-                file_key_abs_path, **(
+                file_key_abs_path, **(representer_kwargs or {})
             )
             output_representation[agent_facing_name] = {
                 "representation": representation,
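For orientation, here is a minimal sketch of the new executor wiring introduced above, using only the calls visible in this diff (CodeTransformer with function= and code_executor=, LocalCodeExecutor, export()); the input path and the transformation source string are placeholders.

    from pathlib import Path

    from satif_sdk.code_executors.local_executor import LocalCodeExecutor
    from satif_sdk.transformers import CodeTransformer

    # Placeholder: source code of the transformation function produced by the agent.
    transformation_code = "..."

    transformer = CodeTransformer(
        function=transformation_code,
        code_executor=LocalCodeExecutor(disable_security_warning=True),
    )
    # export() receives the resolved input SDIF path, as in execute_transformation above.
    output_path = transformer.export(Path("input.sdif").resolve())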
satif_ai/utils/merge_sdif.py
ADDED
@@ -0,0 +1,22 @@
+from pathlib import Path
+from typing import List
+
+
+async def merge_sdif_files(sdif_paths: List[Path], output_dir: Path) -> Path:
+    """Placeholder function to merge multiple SDIF files into one.
+
+    Args:
+        sdif_paths: A list of paths to the SDIF files to merge.
+        output_dir: The directory where the merged file should be saved.
+
+    Returns:
+        Path to the merged SDIF file.
+    """
+    if not sdif_paths:
+        raise ValueError("No SDIF files provided for merging.")
+
+    if len(sdif_paths) == 1:
+        return sdif_paths[0]  # No merge needed
+
+    # TODO: Implement SDIF merge
+    raise NotImplementedError("Merge not implemented yet.")
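A short, hedged usage note for the placeholder above: with a single input the function returns that path unchanged, and with several inputs it currently raises NotImplementedError. The paths below are hypothetical.

    import asyncio
    from pathlib import Path

    from satif_ai.utils.merge_sdif import merge_sdif_files

    async def main() -> None:
        # Single file: returned as-is, no merge attempted.
        merged = await merge_sdif_files([Path("a.sdif")], output_dir=Path("out"))
        print(merged)  # -> a.sdif

        # Multiple files: the merge itself is still a TODO.
        try:
            await merge_sdif_files([Path("a.sdif"), Path("b.sdif")], output_dir=Path("out"))
        except NotImplementedError as exc:
            print(f"Not supported yet: {exc}")

    asyncio.run(main())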
satif_ai/utils/openai_mcp.py
ADDED
@@ -0,0 +1,97 @@
+import logging
+from typing import Any
+
+from agents.mcp.server import CallToolResult, MCPServer, MCPTool
+from fastmcp import FastMCP
+
+logger = logging.getLogger(__name__)
+
+
+class OpenAICompatibleMCP(MCPServer):
+    def __init__(self, mcp: FastMCP):
+        self.mcp = mcp
+        self._is_connected = False  # Track connection state
+
+    async def connect(self):
+        """Connect to the server.
+        For FastMCP, connection is managed externally when the server is run.
+        This method marks the wrapper as connected.
+        """
+        # Assuming FastMCP instance is already running and configured.
+        # No specific connect action required for the FastMCP instance itself here,
+        # as its lifecycle (run, stop) is managed outside this wrapper.
+        logger.info(
+            f"OpenAICompatibleMCP: Simulating connection to FastMCP server '{self.mcp.name}'."
+        )
+        self._is_connected = True
+
+    @property
+    def name(self) -> str:
+        """A readable name for the server."""
+        return self.mcp.name
+
+    async def cleanup(self):
+        """Cleanup the server.
+        For FastMCP, cleanup is managed externally. This method marks the wrapper as disconnected.
+        """
+        # Similar to connect, actual server cleanup is external.
+        logger.info(
+            f"OpenAICompatibleMCP: Simulating cleanup for FastMCP server '{self.mcp.name}'."
+        )
+        self._is_connected = False
+
+    async def list_tools(self) -> list[MCPTool]:
+        """List the tools available on the server."""
+        if not self._is_connected:
+            # Or raise an error, depending on desired behavior for disconnected state
+            raise RuntimeError(
+                "OpenAICompatibleMCP.list_tools called while not connected."
+            )
+
+        # FastMCP's get_tools() returns a dict[str, fastmcp.tools.tool.Tool]
+        # Each fastmcp.tools.tool.Tool has a to_mcp_tool(name=key) method
+        # MCPTool is an alias for mcp.types.Tool
+        try:
+            fastmcp_tools = await self.mcp.get_tools()
+            mcp_tools_list = [
+                tool.to_mcp_tool(name=key) for key, tool in fastmcp_tools.items()
+            ]
+            return mcp_tools_list
+        except Exception as e:
+            logger.error(
+                f"Error listing tools from FastMCP server '{self.mcp.name}': {e}",
+                exc_info=True,
+            )
+            raise e
+
+    async def call_tool(
+        self, tool_name: str, arguments: dict[str, Any] | None
+    ) -> CallToolResult:
+        """Invoke a tool on the server."""
+        if not self._is_connected:
+            logger.warning(
+                f"OpenAICompatibleMCP.call_tool '{tool_name}' called while not connected."
+            )
+            # Return an error CallToolResult
+            return CallToolResult(
+                content=[{"type": "text", "text": "Server not connected"}], isError=True
+            )
+
+        try:
+            # FastMCP's _mcp_call_tool is a protected member, but seems to be what we need.
+            # It returns: list[TextContent | ImageContent | EmbeddedResource]
+            # This matches the 'content' part of CallToolResult.
+            # We need to handle potential errors and wrap the result.
+            content = await self.mcp._mcp_call_tool(tool_name, arguments or {})
+            return CallToolResult(content=content, isError=False)
+        except Exception as e:
+            logger.error(
+                f"Error calling tool '{tool_name}' on FastMCP server '{self.mcp.name}': {e}",
+                exc_info=True,
+            )
+            error_message = f"Error calling tool '{tool_name}': {type(e).__name__}: {e}"
+            # Ensure content is a list of valid MCP content items, even for errors.
+            # A TextContent is a safe choice.
+            return CallToolResult(
+                content=[{"type": "text", "text": error_message}], isError=True
+            )
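The class above adapts a FastMCP instance to the MCPServer interface used by the agents SDK. A rough usage sketch follows; the FastMCP server name and the ping tool are illustrative, and only the wrapper methods shown in this diff are assumed.

    import asyncio

    from fastmcp import FastMCP

    from satif_ai.utils.openai_mcp import OpenAICompatibleMCP

    mcp = FastMCP("demo")  # hypothetical FastMCP server

    @mcp.tool()
    def ping() -> str:
        """Trivial example tool."""
        return "pong"

    async def main() -> None:
        server = OpenAICompatibleMCP(mcp)
        await server.connect()  # only flips the wrapper's connected flag
        tools = await server.list_tools()
        print([tool.name for tool in tools])
        result = await server.call_tool("ping", None)
        print(result.isError, result.content)
        await server.cleanup()

    asyncio.run(main())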
satif_ai/utils/zip.py
ADDED
@@ -0,0 +1,120 @@
+import asyncio
+import logging
+import zipfile
+from pathlib import Path
+from typing import List, Tuple
+
+logger = logging.getLogger(__name__)
+
+# Constants for ZIP file processing, kept local to this utility or passed as args if needed
+_IGNORED_ZIP_MEMBER_PREFIXES = ("__MACOSX/",)
+_IGNORED_ZIP_FILENAME_PREFIXES = ("._",)
+_IGNORED_ZIP_FILENAMES = (".DS_Store",)
+
+
+async def extract_zip_archive_async(
+    zip_path: Path,
+    extract_to: Path,
+    ignored_member_prefixes: Tuple[str, ...] = _IGNORED_ZIP_MEMBER_PREFIXES,
+    ignored_filename_prefixes: Tuple[str, ...] = _IGNORED_ZIP_FILENAME_PREFIXES,
+    ignored_filenames: Tuple[str, ...] = _IGNORED_ZIP_FILENAMES,
+) -> List[Path]:
+    """
+    Asynchronously extracts a ZIP archive to a specified directory, filtering out ignored files.
+
+    Args:
+        zip_path: Path to the ZIP archive.
+        extract_to: Directory where the contents will be extracted.
+        ignored_member_prefixes: Tuple of member path prefixes to ignore.
+        ignored_filename_prefixes: Tuple of filename prefixes to ignore.
+        ignored_filenames: Tuple of exact filenames to ignore.
+
+    Returns:
+        A list of paths to the successfully extracted files.
+
+    Raises:
+        ValueError: If the zip_path is invalid or corrupted.
+        RuntimeError: If any other error occurs during extraction.
+    """
+
+    def blocking_extract() -> List[Path]:
+        extracted_file_paths = []
+        logger.info(f"Extracting ZIP archive '{zip_path.name}' to '{extract_to}'...")
+        try:
+            extract_to.mkdir(
+                parents=True, exist_ok=True
+            )  # Ensure extract_to directory exists
+
+            with zipfile.ZipFile(zip_path, "r") as zip_ref:
+                # Security: Preliminary check for unsafe paths before extraction
+                for member_name in zip_ref.namelist():
+                    if member_name.startswith(("/", "..")):
+                        logger.error(
+                            f"Skipping potentially unsafe path in ZIP: {member_name}"
+                        )
+                        # Depending on security policy, might raise an error here
+                        continue
+
+                # Extract all members
+                zip_ref.extractall(extract_to)
+
+            # After extractall, collect all *file* paths, applying filters
+            # This second pass of filtering ensures that even if extractall creates them,
+            # we don't return paths to ignored files.
+            for root, _, files in extract_to.walk():
+                for filename in files:
+                    full_path = root / filename
+                    # Create a path relative to 'extract_to' to check against member prefixes
+                    # This ensures that '__MACOSX/file.txt' is correctly ignored,
+                    # not just a top-level '__MACOSX' directory.
+                    try:
+                        relative_path_to_check = full_path.relative_to(extract_to)
+                    except ValueError:
+                        # This can happen if full_path is not under extract_to,
+                        # which ideally shouldn't occur if zip_ref.extractall worked as expected
+                        # and target_path checks were effective.
+                        logger.warning(
+                            f"File {full_path} seems to be outside extraction root {extract_to}. Skipping."
+                        )
+                        continue
+
+                    path_str_to_check_prefixes = str(relative_path_to_check)
+
+                    if not (
+                        any(
+                            path_str_to_check_prefixes.startswith(p)
+                            for p in ignored_member_prefixes
+                        )
+                        or any(
+                            full_path.name.startswith(p)
+                            for p in ignored_filename_prefixes
+                        )
+                        or full_path.name in ignored_filenames
+                    ):
+                        extracted_file_paths.append(full_path)
+                    else:
+                        logger.debug(f"Ignoring file post-extraction: {full_path}")
+
+            if not extracted_file_paths:
+                logger.warning(
+                    f"ZIP archive '{zip_path.name}' is empty or contains no processable files after filtering."
+                )
+            else:
+                logger.info(
+                    f"Successfully extracted {len(extracted_file_paths)} file(s) from '{zip_path.name}'."
+                )
+            return extracted_file_paths
+        except zipfile.BadZipFile as e:
+            logger.error(
+                f"Invalid or corrupted ZIP file: {zip_path.name}", exc_info=True
+            )
+            raise ValueError(f"Invalid or corrupted ZIP file: {zip_path.name}") from e
+        except Exception as e:
+            logger.error(
+                f"Failed to extract ZIP archive '{zip_path.name}': {e}", exc_info=True
+            )
+            raise RuntimeError(
+                f"Unexpected error during ZIP extraction for '{zip_path.name}'"
+            ) from e
+
+    return await asyncio.to_thread(blocking_extract)
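A minimal example of driving extract_zip_archive_async (archive and destination paths are placeholders): the blocking extraction runs in a worker thread via asyncio.to_thread, so it can be awaited alongside other work.

    import asyncio
    from pathlib import Path

    from satif_ai.utils.zip import extract_zip_archive_async

    async def main() -> None:
        # __MACOSX/ entries, ._* resource forks and .DS_Store files are filtered out by default.
        files = await extract_zip_archive_async(
            zip_path=Path("example.zip"),
            extract_to=Path("extracted"),
        )
        for path in files:
            print(path)

    asyncio.run(main())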
{satif_ai-0.2.8.dist-info → satif_ai-0.2.9.dist-info}/METADATA
@@ -1,10 +1,11 @@
 Metadata-Version: 2.3
 Name: satif-ai
-Version: 0.2.8
+Version: 0.2.9
 Summary: AI Agents for Satif
 License: MIT
-Author:
-
+Author: Syncpulse
+Maintainer: Bryan Djafer
+Maintainer-email: bryan.djafer@syncpulse.fr
 Requires-Python: >=3.10,<4.0
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Programming Language :: Python :: 3
satif_ai-0.2.9.dist-info/RECORD
ADDED
@@ -0,0 +1,19 @@
+satif_ai/__init__.py,sha256=cqJ6Kd9IolVodPi9yOBPnfhYQXH5a1JgRB3HfLOtP_4,611
+satif_ai/adapters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+satif_ai/adapters/tidy.py,sha256=lcJXFmzEgCFy1W57kgbMOkoFTPLOkrvHC6NHVRKn-04,18549
+satif_ai/standardize.py,sha256=TgAB_nhcHY8zqlfT1PpgfgSswqdE-ly-dheQz-7NC7Q,5674
+satif_ai/standardizers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+satif_ai/standardizers/ai.py,sha256=5vv-Rs6s_9FA21uM2iepTsbv6f3adZ8wFteOcW53z_s,21458
+satif_ai/standardizers/ai_csv.py,sha256=tMibsTp55sHJ56r7cYKjb5b0Hm6rdnV3TeA0EppIWJg,25371
+satif_ai/transform.py,sha256=iy9prkBCknRcsSXWOY_NwtNojQVcRW_luYFwkcjOnPw,5600
+satif_ai/transformation_builders/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+satif_ai/transformation_builders/syncpulse.py,sha256=c59BZicNnqs3NDKpflBAPqw42pGb6nYB2Zps0ChGyaM,11368
+satif_ai/utils/__init__.py,sha256=F-usaCt_vX872mXvtukuZdNMPnkVqDb8RaDgox2uow4,212
+satif_ai/utils/merge_sdif.py,sha256=-BXsCaLDHEtKOQRWOKyVCNefFwkyVygFQs8NeeFONFA,663
+satif_ai/utils/openai_mcp.py,sha256=duCQZXG0mBs9DOOFIUvzraJhxD2IDzegWO9iOiLfFwY,3938
+satif_ai/utils/zip.py,sha256=G_GK8629Iw0TLFCQJfnqOscv7MoKF5zdzxvEAbL7Gss,5186
+satif_ai-0.2.9.dist-info/LICENSE,sha256=kS8EN6yAaGZd7V5z6GKSn_x3ozcZltrfRky4vMPRCw8,1072
+satif_ai-0.2.9.dist-info/METADATA,sha256=Vq62i6fUx8sKaM2mYVqRfGReHCTcFG_P6mW1otnx8GY,696
+satif_ai-0.2.9.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
+satif_ai-0.2.9.dist-info/entry_points.txt,sha256=Mz2SwYALjktap1bF-Q3EWBgiZVNT6QJCVsCs_fCV33Y,43
+satif_ai-0.2.9.dist-info/RECORD,,
satif_ai-0.2.8.dist-info/RECORD
DELETED
@@ -1,13 +0,0 @@
-satif_ai/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-satif_ai/adapters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-satif_ai/adapters/tidy.py,sha256=2oYj7Gz3vOQtzcpoJI4JbftWlMKvOWL8rdwthjg-zUE,19884
-satif_ai/code_builders/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-satif_ai/code_builders/adaptation.py,sha256=E29YM0S6pMtAfB0uzSUexoeWKwXfF8iJVyYUCKWQz5k,188
-satif_ai/code_builders/transformation.py,sha256=5B7a6lDv-gqQo83F8fQeSw2gHpDgznoDfjXsASkLc60,11870
-satif_ai/standardizers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-satif_ai/standardizers/ai_csv.py,sha256=c0CKnIib610GgwGqaF8NaqT_P4pZ2BupO-BTSNuIhoc,25385
-satif_ai-0.2.8.dist-info/LICENSE,sha256=kS8EN6yAaGZd7V5z6GKSn_x3ozcZltrfRky4vMPRCw8,1072
-satif_ai-0.2.8.dist-info/METADATA,sha256=CZBbNd1A-KL8eoOzmmz7bW3ue4HGOC2Qic60wQ-v6z8,670
-satif_ai-0.2.8.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
-satif_ai-0.2.8.dist-info/entry_points.txt,sha256=Mz2SwYALjktap1bF-Q3EWBgiZVNT6QJCVsCs_fCV33Y,43
-satif_ai-0.2.8.dist-info/RECORD,,
/satif_ai/{code_builders → transformation_builders}/__init__.py
File without changes
{satif_ai-0.2.8.dist-info → satif_ai-0.2.9.dist-info}/LICENSE
File without changes
{satif_ai-0.2.8.dist-info → satif_ai-0.2.9.dist-info}/WHEEL
File without changes
{satif_ai-0.2.8.dist-info → satif_ai-0.2.9.dist-info}/entry_points.txt
File without changes