splitnstitch 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- splitnstitch-0.1.0/LICENSE +21 -0
- splitnstitch-0.1.0/PKG-INFO +46 -0
- splitnstitch-0.1.0/README.md +2 -0
- splitnstitch-0.1.0/pyproject.toml +39 -0
- splitnstitch-0.1.0/setup.cfg +4 -0
- splitnstitch-0.1.0/src/splitnstitch/__init__.py +0 -0
- splitnstitch-0.1.0/src/splitnstitch/__main__.py +37 -0
- splitnstitch-0.1.0/src/splitnstitch/splitnstitch.py +335 -0
- splitnstitch-0.1.0/src/splitnstitch.egg-info/PKG-INFO +46 -0
- splitnstitch-0.1.0/src/splitnstitch.egg-info/SOURCES.txt +12 -0
- splitnstitch-0.1.0/src/splitnstitch.egg-info/dependency_links.txt +1 -0
- splitnstitch-0.1.0/src/splitnstitch.egg-info/entry_points.txt +2 -0
- splitnstitch-0.1.0/src/splitnstitch.egg-info/requires.txt +3 -0
- splitnstitch-0.1.0/src/splitnstitch.egg-info/top_level.txt +1 -0
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 espehon
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: splitnstitch
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: CLI tool for splitting and merging table files (safe export / import merge).
|
|
5
|
+
Author: espehon
|
|
6
|
+
License: MIT License
|
|
7
|
+
|
|
8
|
+
Copyright (c) 2026 espehon
|
|
9
|
+
|
|
10
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
11
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
12
|
+
in the Software without restriction, including without limitation the rights
|
|
13
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
14
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
15
|
+
furnished to do so, subject to the following conditions:
|
|
16
|
+
|
|
17
|
+
The above copyright notice and this permission notice shall be included in all
|
|
18
|
+
copies or substantial portions of the Software.
|
|
19
|
+
|
|
20
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
21
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
22
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
23
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
24
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
25
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
26
|
+
SOFTWARE.
|
|
27
|
+
|
|
28
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
29
|
+
Classifier: Programming Language :: Python
|
|
30
|
+
Classifier: Programming Language :: Python :: 3
|
|
31
|
+
Classifier: Programming Language :: Python :: 3 :: Only
|
|
32
|
+
Classifier: Programming Language :: Python :: 3.8
|
|
33
|
+
Classifier: Programming Language :: Python :: 3.9
|
|
34
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
35
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
36
|
+
Classifier: Operating System :: OS Independent
|
|
37
|
+
Requires-Python: >=3.8
|
|
38
|
+
Description-Content-Type: text/markdown
|
|
39
|
+
License-File: LICENSE
|
|
40
|
+
Requires-Dist: pandas==3.0.0
|
|
41
|
+
Requires-Dist: questionary==2.1.1
|
|
42
|
+
Requires-Dist: halo==0.0.31
|
|
43
|
+
Dynamic: license-file
|
|
44
|
+
|
|
45
|
+
# splitnstitch
|
|
46
|
+
Split a data table to create a safe-to-share version or stitch them back together
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
[build-system]
|
|
2
|
+
requires = ["setuptools>=61.0", "wheel"]
|
|
3
|
+
build-backend = "setuptools.build_meta"
|
|
4
|
+
|
|
5
|
+
[project]
|
|
6
|
+
name = "splitnstitch"
|
|
7
|
+
version = "0.1.0"
|
|
8
|
+
description = "CLI tool for splitting and merging table files (safe export / import merge)."
|
|
9
|
+
readme = "README.md"
|
|
10
|
+
requires-python = ">=3.8"
|
|
11
|
+
authors = [ { name = "espehon" } ]
|
|
12
|
+
license = { file = "LICENSE" }
|
|
13
|
+
dependencies = [
|
|
14
|
+
"pandas==3.0.0",
|
|
15
|
+
"questionary==2.1.1",
|
|
16
|
+
"halo==0.0.31"
|
|
17
|
+
]
|
|
18
|
+
|
|
19
|
+
classifiers = [
|
|
20
|
+
"License :: OSI Approved :: MIT License",
|
|
21
|
+
"Programming Language :: Python",
|
|
22
|
+
"Programming Language :: Python :: 3",
|
|
23
|
+
"Programming Language :: Python :: 3 :: Only",
|
|
24
|
+
"Programming Language :: Python :: 3.8",
|
|
25
|
+
"Programming Language :: Python :: 3.9",
|
|
26
|
+
"Programming Language :: Python :: 3.10",
|
|
27
|
+
"Programming Language :: Python :: 3.11",
|
|
28
|
+
"Operating System :: OS Independent",
|
|
29
|
+
]
|
|
30
|
+
|
|
31
|
+
[tool.setuptools]
|
|
32
|
+
package-dir = { "" = "src" }
|
|
33
|
+
|
|
34
|
+
[tool.setuptools.packages.find]
|
|
35
|
+
where = ["src"]
|
|
36
|
+
|
|
37
|
+
[project.scripts]
|
|
38
|
+
# console script entry point: `sns`
|
|
39
|
+
sns = "splitnstitch.__main__:main"
|
|
File without changes
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
import sys
|
|
2
|
+
import argparse
|
|
3
|
+
|
|
4
|
+
import importlib.metadata
|
|
5
|
+
from splitnstitch.splitnstitch import main as _interactive_main
|
|
6
|
+
|
|
7
|
+
# Resolve the installed distribution's version string; fall back to a
# human-readable placeholder when running from a source checkout that
# was never pip-installed.
try:
    __version__ = importlib.metadata.version('splitnstitch')
except importlib.metadata.PackageNotFoundError:
    __version__ = "Package not installed..."
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def main(argv=None):
    """Console-script entry point.

    Handles ``-?``/``--help`` and ``-v``/``--version`` itself; any other
    invocation is forwarded to the interactive CLI implemented by
    ``splitnstitch.splitnstitch.main``.

    Returns an integer exit code suitable for ``SystemExit``.
    """
    cli = argparse.ArgumentParser(add_help=False)
    cli.add_argument('-?', '--help', action='help', help='show this help message and exit')
    cli.add_argument('-v', '--version', action='store_true', help='show program version and exit')
    parsed, _unknown = cli.parse_known_args(argv)

    if parsed.version:
        print(__version__)
        return 0

    # No version flag -> run interactive CLI, translating any SystemExit it
    # raises into a plain integer return code.
    exit_code = 0
    try:
        _interactive_main()
    except SystemExit as exc:
        exit_code = exc.code if isinstance(exc.code, int) else 0
    return exit_code
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
# Allow direct execution (`python -m splitnstitch`); propagate main()'s
# return value as the process exit code.
if __name__ == '__main__':
    raise SystemExit(main(sys.argv[1:]))
|
|
@@ -0,0 +1,335 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import json
|
|
3
|
+
from itertools import islice
|
|
4
|
+
|
|
5
|
+
import pandas as pd
|
|
6
|
+
import questionary as q
|
|
7
|
+
from halo import Halo
|
|
8
|
+
|
|
9
|
+
# Shared progress spinner reused by every long-running step in this module.
spinner = Halo(text='Processing...', spinner='dots')
sample_size = 20  # Number of sample values to show during inspection
|
|
11
|
+
|
|
12
|
+
splash_screen = r"""
|
|
13
|
+
____ ___ __ _ __ ______ _ __ __
|
|
14
|
+
/ __/__ / (_) /_ ____ / |/ / ____ / __/ /_(_) /_____/ /
|
|
15
|
+
_\ \/ _ \/ / / __/ /___/ / / /___/ _\ \/ __/ / __/ __/ _ \
|
|
16
|
+
/___/ .__/_/_/\__/ /_/|_/ /___/\__/_/\__/\__/_//_/
|
|
17
|
+
/_/
|
|
18
|
+
``````````````````````````````````````````````````````````````
|
|
19
|
+
"""
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def clear_console() -> None:
    """Wipe the terminal in a cross-platform way.

    Falls back to scrolling the screen with blank lines if invoking the
    shell command fails for any reason.
    """
    command = "cls" if os.name == "nt" else "clear"
    try:
        os.system(command)
    except Exception:
        # Last resort: push old output off-screen.
        print("\n" * 80)
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
def choose_file(prompt, supported_formats=('.csv', '.xlsx')):
    """Prompt the user to pick a file from the current directory.

    Parameters:
        prompt: Question text shown above the selection list.
        supported_formats: Tuple of file extensions to offer.

    Returns:
        The selected filename, or None when no candidate files exist
        (a message is printed in that case) or the prompt is cancelled.
    """
    candidates = [f for f in os.listdir('.') if f.endswith(supported_formats)]
    # Explicit check instead of `assert`: asserts are stripped under
    # `python -O`, which would previously let an empty choice list reach
    # q.select and crash there instead of printing the friendly message.
    if not candidates:
        print("🔎 No supported files found in current directory.")
        return None
    return q.select(prompt, choices=candidates).ask()
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
def read_file(file_path):
    """Load a table file into a DataFrame based on its extension.

    CSV and XLSX files are read with every cell as ``str``; JSON is read
    with pandas defaults. Raises ValueError for any other extension.
    """
    readers = {
        '.csv': lambda p: pd.read_csv(p, dtype=str),
        '.xlsx': lambda p: pd.read_excel(p, dtype=str),
        '.json': pd.read_json,
    }
    for suffix, reader in readers.items():
        if file_path.endswith(suffix):
            return reader(file_path)
    raise ValueError("Unsupported file format.")
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def join_detail(df):
    """Interactively left-join columns from a user-selected detail file onto df.

    The user picks the detail file, the key column on each side, and which
    detail columns to add. On any cancellation or failure the ORIGINAL df is
    returned unchanged.

    Returns:
        The merged DataFrame, or `df` untouched when the join is skipped.
    """
    detail_file = choose_file("Select detail file to join:")
    if not detail_file:
        print("No detail file selected. Skipping detail join.")
        return df
    # Give the user up to 3 chances to close the file if it is locked.
    for attempt in range(3, 0, -1):
        try:
            # Typo fix: "if refreshed" -> "is refreshed".
            q.press_any_key_to_continue(f"Make sure {detail_file} is refreshed and file is closed.\nPress any key to continue...").ask()
            detail_df = read_file(detail_file)
            break
        except PermissionError:
            print(f"⚠️ File is in use or locked. {attempt - 1} attempt(s) remain...")
            if attempt == 1:
                print("❌ Unable to access package detail file. Skipping detail join.")
                return df
        except KeyboardInterrupt:
            print("Operation cancelled by user.")
            return df
        except Exception as e:
            print(f"❌ Error reading package detail file: {e}")
            return df

    all_columns = detail_df.columns.tolist()
    detail_key_column = q.select("Select the key column (from detail) to join on:", choices=all_columns).ask()
    try:
        # Remove the key so it is not offered again as a column to add.
        all_columns.remove(detail_key_column)
    except ValueError:
        print(f"Warning: Key column '{detail_key_column}' not found in detail file.")
        print("Proceeding without joining details.")
        return df
    columns_to_add = q.checkbox(
        "Select detail columns to add:",
        choices=all_columns
    ).ask()
    if not columns_to_add:
        print("No detail columns selected.")
        return df
    source_key_column = q.select("Select the key column (from source) to join on:", choices=df.columns.tolist()).ask()
    spinner.start("Joining detail data...")
    try:
        pkg_subset = detail_df[[detail_key_column] + columns_to_add]

        # Rename on a copy instead of `inplace=True` so the caller's frame
        # is never mutated — previously a failure in the merge below still
        # left `df` with its key column renamed.
        renamed = df.rename(columns={source_key_column: detail_key_column})

        merged_df = renamed.merge(
            right=pkg_subset,
            on=detail_key_column,
            how='left'
        )
        spinner.succeed("Detail data joined.")
        return merged_df
    except Exception as e:
        spinner.fail("Failed to join detail data.")
        print(f"Error: {e}")
        return df
|
|
109
|
+
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
def export_safe():
    """Export a CSV containing only the columns whitelisted in a safe-columns file.

    Workflow: pick the safe-columns JSON file (offering to create one when
    none exists), pick the internal source file, optionally join detail
    data, then write the filtered frame to a CSV named by the user.
    """
    # Get safe columns file
    safe_columns_file = choose_file("Select the SAFE COLUMNS file to use:", supported_formats=('.json',))
    if not safe_columns_file:
        # No whitelist available: offer to build one, then abort this export
        # so the user can restart with the new file in place.
        user = q.confirm("Would you like to create a safe columns file now?", default=True).ask()
        if user:
            create_safe_columns_file()
            print("Please restart the export process.")
        return

    # Load safe column list (a JSON array of column names; set for O(1) lookup)
    with open(safe_columns_file, "r") as f:
        safe_cols = set(json.load(f))

    in_file = choose_file("Select the INTERNAL (Source) file to export a safe version from:")
    if not in_file:
        print("No file selected.")
        return

    # Optionally join package details
    join_details = q.confirm("Would you like to join detail data?", default=False).ask()

    spinner.start()
    try:
        df = read_file(in_file)

        # Select only safe columns that actually exist
        export_cols = [c for c in df.columns if c in safe_cols]
        df_safe = df[export_cols]

        if join_details:
            # Stop the spinner before join_detail, which prompts interactively.
            spinner.succeed("Export data prepared, joining details...")
            df_safe = join_detail(df_safe)
        else:
            spinner.succeed("Export data prepared.")

        # NOTE(review): split('.')[0] truncates at the FIRST dot, so a name
        # like "data.v2.csv" defaults to "data_Clean" — confirm intended.
        out_file = q.text("Enter output filename:", default=f"{in_file.split('.')[0]}_Clean").ask()
        if not out_file:
            return
        spinner.start("Finalizing export...")
        if not out_file.endswith('.csv'):
            out_file += '.csv'
        df_safe.to_csv(out_file, index=False)

        spinner.succeed("Export complete.")
        print(f"Safe export created: {out_file}")
    except Exception as e:
        spinner.fail("Export failed.")
        print(f"Error: {e}")
|
|
161
|
+
|
|
162
|
+
|
|
163
|
+
def import_merge():
    """Merge new columns from a returned (cleaned) file back into the source file.

    The user picks both files and the shared key column; only columns that
    do NOT already exist in the internal file are pulled in (left join), so
    original data is never overwritten. The result is written to a CSV.
    """
    internal = choose_file("Select ORIGINAL internal (Source) file:")
    if not internal:
        return
    external = choose_file("Select 3rd party RETURNED (Cleaned) file:")
    if not external:
        return

    spinner.start("Loading files...")
    try:
        df_int = read_file(internal)
        df_ext = read_file(external)
        spinner.succeed("Files loaded.")
    except Exception as e:
        spinner.fail("Failed to load files.")
        print(e)
        return

    source_key_column = q.select("Select the ORIGINAL KEY column to merge on:", choices=df_int.columns.tolist()).ask()
    if not source_key_column:
        return

    spinner.start()
    try:
        if source_key_column not in df_int.columns:
            spinner.fail(f"ID column '{source_key_column}' not found in internal file.")
            return
        if source_key_column not in df_ext.columns:
            spinner.fail(f"ID column '{source_key_column}' not found in external file.")
            return

        # Only add NEW columns from external
        new_cols = [c for c in df_ext.columns if c not in df_int.columns]

        df_merge = df_int.merge(
            right=df_ext[[source_key_column] + new_cols],
            on=source_key_column,
            how="left"
        )

        # Ask if details should be added
        spinner.succeed("Merging complete")
        if q.confirm("Would you like to join detail file? (Select 'N' if it was already added)", default=False).ask():
            df_merge = join_detail(df_merge)

        out_file = q.text("Enter output filename:", default=f"{internal.split('.')[0]}_Merged").ask()
        if not out_file:
            return
        spinner.start("Creating file...")
        if not out_file.endswith('.csv'):
            out_file += '.csv'
        df_merge.to_csv(out_file, index=False)

        spinner.succeed("Import and merge complete.")
        print(f"Merged file created: {out_file}")
    except Exception as e:
        spinner.fail("Import failed.")
        # Bug fix: the exception was previously swallowed silently (`e` was
        # unused); surface it like every other handler in this module.
        print(f"Error: {e}")
|
|
221
|
+
|
|
222
|
+
|
|
223
|
+
def inspect_column_safety(column: pd.Series) -> bool:
    """Interactively decide whether a column is safe to export.

    Shows a random sample of values and asks the user to classify the
    column, looping until a Yes/No answer is given.

    Returns:
        True when the user marks the column safe; False when marked
        sensitive or when the prompt is cancelled (fail-safe default).
    """
    while True:
        # Print header
        print(f"Inspecting column: {column.name}")
        print(f"{'-'*40}")
        # Show sample values
        sample_values = column.sample(min(sample_size, len(column))).tolist()
        for val in sample_values:
            print(f" {val}")
        print()  # Extra newline for spacing
        user = q.select(
            "Is this column SAFE to include in the export?",
            choices=[
                "Yes, safe to include.",
                "No, contains sensitive data.",
                "Show sample values again."
            ]).ask()
        if user is None:
            # Bug fix: a cancelled prompt returns None; previously this
            # crashed on None.startswith. Treat cancellation as unsafe.
            return False
        if user.startswith("Yes"):
            return True
        elif user.startswith("No"):
            return False
|
|
244
|
+
|
|
245
|
+
|
|
246
|
+
def create_safe_columns_file():
    """Interactively build a JSON whitelist of export-safe column names.

    Walks the user through every column of a chosen table file, records a
    safe/unsafe verdict for each, and writes the names marked safe to a
    JSON array file.
    """
    def preview_last_choices(choices: dict, N: int = 0) -> None:
        # Recap of verdicts already made, shown at the top of each screen.
        # N=0 means show all
        # Dict preserves insertion order (Py3.7+)
        if choices:
            print("Last choices made:")
            if N == 0:
                # Show all, in original order
                last_n = list(choices.items())
            else:
                last_n = list(islice(reversed(choices.items()), N))[::-1]
            for key, value in last_n:
                if value is True:
                    q.print(f"🔓 (Safe) {key}", style="fg:green")
                else:
                    q.print(f"🔒 (Unsafe) {key}", style="fg:red")
            print()  # Extra newline for spacing

    in_file = choose_file("Select a file to generate safe columns from:")
    if not in_file:
        print("No file selected.")
        return
    try:
        df = read_file(in_file)
        cols = df.columns.tolist()
    except Exception as e:
        print(f"❌ Error reading file: {e}")
        # Bug fix: previously execution fell through here and crashed with
        # NameError because `cols`/`df` were never bound.
        return

    if not q.confirm("Ready to loop through every column for inspection? (This may take a while, grab some ☕)", default=True).ask():
        print("Maybe another time...")
        return

    safe_columns = {}
    for col in cols:
        clear_console()
        preview_last_choices(safe_columns, N=5)
        try:
            safe_columns[col] = inspect_column_safety(df[col])
        except KeyboardInterrupt:
            print("Operation cancelled by user.")
            return
        except Exception as e:
            print(f"Error inspecting column '{col}': {e}")
            if q.confirm("Mark this column as unsafe and continue?", default=True).ask():
                safe_columns[col] = False
            else:
                return

    clear_console()
    preview_last_choices(safe_columns)
    out_file = q.text("Enter output filename for safe columns:", default="safe_columns").ask()
    if not out_file:
        # Bug fix: a cancelled prompt returns None; previously this crashed
        # on None.endswith. Abort without writing anything.
        return
    if not out_file.endswith('.json'):
        out_file += '.json'
    with open(out_file, "w") as f:
        json.dump([k for k, v in safe_columns.items() if v], f, indent=4)
    print(f"✅ Safe columns file created: {out_file}")
|
|
302
|
+
|
|
303
|
+
|
|
304
|
+
def main():
    """Entry point: print the banner, show the menu, dispatch the choice."""
    print(splash_screen)
    # Menu-number -> handler mapping; "0" is handled separately below.
    actions = {
        "1": export_safe,
        "2": import_merge,
        "3": create_safe_columns_file,
    }
    try:
        choice = q.select(
            message="Choose an operation:",
            choices=[
                "1[Split] Export Safe version of Internal (Source) File.",
                "2[Stitch] Import and Merge External (Returned) File.",
                "3[Generate] Create Safe Columns File.",
                "0[Exit] Quit the program."
            ]
        ).ask()
        if not choice:
            return
        key = choice.split('[')[0]
        if key == "0":
            print("Goodbye!")
            return
        handler = actions.get(key)
        if handler is not None:
            handler()
        else:
            print("Invalid choice.")
    except KeyboardInterrupt:
        spinner.warn("Operation cancelled by user.")
|
|
332
|
+
|
|
333
|
+
|
|
334
|
+
# Allow running this module directly (e.g. `python splitnstitch.py`).
if __name__ == "__main__":
    main()
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: splitnstitch
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: CLI tool for splitting and merging table files (safe export / import merge).
|
|
5
|
+
Author: espehon
|
|
6
|
+
License: MIT License
|
|
7
|
+
|
|
8
|
+
Copyright (c) 2026 espehon
|
|
9
|
+
|
|
10
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
11
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
12
|
+
in the Software without restriction, including without limitation the rights
|
|
13
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
14
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
15
|
+
furnished to do so, subject to the following conditions:
|
|
16
|
+
|
|
17
|
+
The above copyright notice and this permission notice shall be included in all
|
|
18
|
+
copies or substantial portions of the Software.
|
|
19
|
+
|
|
20
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
21
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
22
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
23
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
24
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
25
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
26
|
+
SOFTWARE.
|
|
27
|
+
|
|
28
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
29
|
+
Classifier: Programming Language :: Python
|
|
30
|
+
Classifier: Programming Language :: Python :: 3
|
|
31
|
+
Classifier: Programming Language :: Python :: 3 :: Only
|
|
32
|
+
Classifier: Programming Language :: Python :: 3.8
|
|
33
|
+
Classifier: Programming Language :: Python :: 3.9
|
|
34
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
35
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
36
|
+
Classifier: Operating System :: OS Independent
|
|
37
|
+
Requires-Python: >=3.8
|
|
38
|
+
Description-Content-Type: text/markdown
|
|
39
|
+
License-File: LICENSE
|
|
40
|
+
Requires-Dist: pandas==3.0.0
|
|
41
|
+
Requires-Dist: questionary==2.1.1
|
|
42
|
+
Requires-Dist: halo==0.0.31
|
|
43
|
+
Dynamic: license-file
|
|
44
|
+
|
|
45
|
+
# splitnstitch
|
|
46
|
+
Split a data table to create a safe-to-share version or stitch them back together
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
LICENSE
|
|
2
|
+
README.md
|
|
3
|
+
pyproject.toml
|
|
4
|
+
src/splitnstitch/__init__.py
|
|
5
|
+
src/splitnstitch/__main__.py
|
|
6
|
+
src/splitnstitch/splitnstitch.py
|
|
7
|
+
src/splitnstitch.egg-info/PKG-INFO
|
|
8
|
+
src/splitnstitch.egg-info/SOURCES.txt
|
|
9
|
+
src/splitnstitch.egg-info/dependency_links.txt
|
|
10
|
+
src/splitnstitch.egg-info/entry_points.txt
|
|
11
|
+
src/splitnstitch.egg-info/requires.txt
|
|
12
|
+
src/splitnstitch.egg-info/top_level.txt
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
splitnstitch
|