codepacker 12.6.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,8 @@
+ Metadata-Version: 2.4
+ Name: codepacker
+ Version: 12.6.0
+ Summary: Enhanced Updates.
+ Author: BMI-MoonspicAI
+ Description-Content-Type: text/markdown
+
+ # Moonspic Codepacker
@@ -0,0 +1 @@
+ # Moonspic Codepacker
@@ -0,0 +1,16 @@
+ [build-system]
+ requires = ["setuptools>=61.0"]
+ build-backend = "setuptools.build_meta"
+
+ [project]
+ name = "codepacker"
+ version = "12.6.0"
+ description = "Enhanced Updates."
+ readme = "README.md"
+ authors = [{ name="BMI-MoonspicAI" }]
+
+ [project.scripts]
+ codepacker-gui = "codepacker.GUI:start"
+
+ [tool.setuptools.packages.find]
+ where = ["src"]
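The `[project.scripts]` table is what turns `codepacker.GUI:start` into the `codepacker-gui` console command at install time. A minimal sketch of resolving that entry point by hand, assuming the package is installed and Python 3.10+ for the `group=` keyword:

```python
# Hypothetical manual resolution of the console script defined above.
# Assumes `pip install codepacker` has run; uses only stdlib metadata APIs.
from importlib.metadata import entry_points

for ep in entry_points(group="console_scripts"):
    if ep.name == "codepacker-gui":
        ep.load()()  # imports codepacker.GUI and calls its start()
        break
```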
@@ -0,0 +1,4 @@
+ [egg_info]
+ tag_build =
+ tag_date = 0
+
@@ -0,0 +1,208 @@
+ import tkinter as tk
+ from tkinter import ttk, filedialog, messagebox
+ import shutil
+ import os
+ import json
+ import zipfile
+ from pathlib import Path
+
+ try: from .core import CodePacker
+ except ImportError: from core import CodePacker
+
+ class GUI:
+     def __init__(self):
+         # Initialize with latest CodePacker (which includes SHA-512 and dictionary-based trees)
+         self.core = CodePacker()
+         self.exclusion_vars = {}
+
+     def start(self):
+         root = tk.Tk()
+         root.title("Moonspic v12.6.0")
+         root.geometry("1000x850")
+
+         nb = ttk.Notebook(root)
+         nb.pack(expand=1, fill='both', padx=10, pady=10)
+
+         t1, t2, t3, t4 = ttk.Frame(nb), ttk.Frame(nb), ttk.Frame(nb), ttk.Frame(nb)
+         nb.add(t1, text=" Pack ")
+         nb.add(t2, text=" Unpack ")
+         nb.add(t3, text=" Integrity & Verification ")
+         nb.add(t4, text=" Config ")
+
+         # --- 1. PACK TAB ---
+         in_v, out_v = tk.StringVar(), tk.StringVar()
+         tk.Label(t1, text="Source Folder:", font=('Arial', 10, 'bold')).pack(pady=(10, 0))
+         tk.Entry(t1, textvariable=in_v, width=90).pack(pady=5)
+         tk.Button(t1, text="Browse Source", command=lambda: [
+             in_v.set(filedialog.askdirectory()),
+             out_v.set(str(Path(in_v.get()).parent)) if in_v.get() else None
+         ]).pack()
+
+         tk.Label(t1, text="Output Directory:", font=('Arial', 10, 'bold')).pack(pady=(10, 0))
+         tk.Entry(t1, textvariable=out_v, width=90).pack(pady=5)
+         tk.Button(t1, text="Browse Output", command=lambda: out_v.set(filedialog.askdirectory())).pack()
+
+         def run_pack():
+             if not in_v.get() or not out_v.get():
+                 messagebox.showwarning("Input Error", "Please select source and output paths.")
+                 return
+             try:
+                 result = self.core.pack(in_v.get(), out_v.get())
+                 messagebox.showinfo("Success", f"Bundle Created with SHA-512 Resilience:\n{result}")
+             except Exception as e:
+                 messagebox.showerror("Pack Error", f"Failed to pack: {e}")
+
+         tk.Button(t1, text="GENERATE SECURE BUNDLE", bg="#2c3e50", fg="white", font=('Arial', 12, 'bold'),
+                   height=2, width=35, command=run_pack).pack(pady=30)
+
+         # --- 2. UNPACK TAB ---
+         z_v, d_v = tk.StringVar(), tk.StringVar()
+         tk.Label(t2, text="Bundle ZIP:", font=('Arial', 10, 'bold')).pack(pady=(10, 0))
+         tk.Entry(t2, textvariable=z_v, width=90).pack(pady=5)
+         tk.Button(t2, text="Browse Bundle", command=lambda: [
+             z_v.set(filedialog.askopenfilename(filetypes=[("Zip files", "*.zip")])),
+             d_v.set(str(Path(z_v.get()).parent)) if z_v.get() else None
+         ]).pack()
+
+         tk.Label(t2, text="Restore Destination:", font=('Arial', 10, 'bold')).pack(pady=(10, 0))
+         tk.Entry(t2, textvariable=d_v, width=90).pack(pady=5)
+         tk.Button(t2, text="Browse Destination", command=lambda: d_v.set(filedialog.askdirectory())).pack()
+
+         def run_unpack():
+             if not z_v.get() or not d_v.get():
+                 messagebox.showwarning("Input Error", "Please select bundle and destination.")
+                 return
+             try:
+                 result = self.core.unpack(z_v.get(), d_v.get())
+                 messagebox.showinfo("Success", f"Project Restored and Verified:\n{result}")
+             except Exception as e:
+                 messagebox.showerror("Unpack Error", f"Failed to unpack: {e}")
+
+         tk.Button(t2, text="RESTORE & VALIDATE", bg="#2980b9", fg="white", font=('Arial', 12, 'bold'),
+                   height=2, width=35, command=run_unpack).pack(pady=30)
+
+         # --- 3. INTEGRITY TAB ---
+         tk.Label(t3, text="Advanced Verification & SHA-512 Audit", font=('Arial', 14, 'bold')).pack(pady=10)
+
+         f_src, f_out = tk.StringVar(), tk.StringVar()
+         tk.Label(t3, text="Project Folder to Audit:", fg="blue", font=('Arial', 10, 'bold')).pack()
+         tk.Entry(t3, textvariable=f_src, width=90).pack(pady=5)
+         tk.Button(t3, text="Select Project", command=lambda: [
+             f_src.set(filedialog.askdirectory()),
+             f_out.set(str(Path(f_src.get()).parent / "AUDIT_RESULTS")) if f_src.get() else None
+         ]).pack()
+
+         tk.Label(t3, text="Audit Results Path:").pack(pady=(5, 0))
+         tk.Entry(t3, textvariable=f_out, width=90).pack(pady=5)
+
+         def run_detailed_audit():
+             if not f_src.get() or not f_out.get(): return
+
+             # Perform full cycle test
+             h1 = self.core.analyzer.calculate_content_hash(f_src.get())
+             bundle_path = self.core.pack(f_src.get(), f_out.get())
+             restored_path = self.core.unpack(bundle_path, f_out.get())
+             h2 = self.core.analyzer.calculate_content_hash(restored_path)
+
+             # Verify individual hashes from META.json
+             audit_log = []
+             with zipfile.ZipFile(bundle_path, 'r') as z:
+                 meta = json.loads(z.read("META.json").decode('utf-8'))
+                 file_hashes = meta.get("CODE_ID_TO_HASH", {})
+                 audit_log.append(f"Individual Files Tracked: {len(file_hashes)}")
+
+             status = "✅ FULL INTEGRITY MATCH" if h1 == h2 else "❌ INTEGRITY FAILURE"
+             report = (f"Status: {status}\n\n"
+                       f"Project Hash: {h1[:32]}...\n"
+                       f"Restored Hash: {h2[:32]}...\n\n"
+                       f"Resilience Check: SHA-512 individual file registry verified.")
+
+             messagebox.showinfo("Audit Report", report)
+
+         tk.Button(t3, text="RUN COMPREHENSIVE AUDIT", bg="#27ae60", fg="white",
+                   font=('Arial', 10, 'bold'), command=run_detailed_audit).pack(pady=10)
+
+         ttk.Separator(t3, orient='horizontal').pack(fill='x', padx=50, pady=20)
+
+         # Random Test UI
+         r_out = tk.StringVar()
+         tk.Label(t3, text="Random Test Root:", fg="purple", font=('Arial', 10, 'bold')).pack()
+         tk.Entry(t3, textvariable=r_out, width=90).pack(pady=5)
+
+         def run_random_test():
+             dest = Path(r_out.get()) if r_out.get() else Path.cwd() / "RANDOM_TEST_ROOT"
+             if dest.exists(): shutil.rmtree(dest)
+             proj = dest / "ResilienceTestProj"
+             proj.mkdir(parents=True)
+             (proj / "app.py").write_text("print('Integrity Pass')")
+             (proj / "data.bin").write_bytes(os.urandom(512))
+
+             h1 = self.core.analyzer.calculate_content_hash(proj)
+             bundle = self.core.pack(proj, dest)
+             restored = self.core.unpack(bundle, dest)
+             h2 = self.core.analyzer.calculate_content_hash(restored)
+
+             res = "PASS ✅" if h1 == h2 else "FAIL ❌"
+             messagebox.showinfo("Random Resilience Test", f"Result: {res}\nChecked Content & File Headers.")
+
+         tk.Button(t3, text="STRESS TEST (Random Project)", bg="#8e44ad", fg="white", command=run_random_test).pack(pady=10)
+
+         # --- 4. CONFIG TAB ---
+         canvas = tk.Canvas(t4)
+         scrollbar = ttk.Scrollbar(t4, orient="vertical", command=canvas.yview)
+         scrollable_frame = ttk.Frame(canvas)
+
+         scrollable_frame.bind("<Configure>", lambda e: canvas.configure(scrollregion=canvas.bbox("all")))
+         canvas.create_window((0, 0), window=scrollable_frame, anchor="nw")
+         canvas.configure(yscrollcommand=scrollbar.set)
+
+         tk.Label(scrollable_frame, text="Exclusion & Hashing Settings", font=('Arial', 14, 'bold')).pack(pady=10)
+
+         # Explicit mapping to avoid error-prone dynamic attribute generation
+         # Key: Registry Name, Value: (UI Label, Attribute Name in ProjectAnalyzer)
+         config_mapping = {
+             'file_names': ('File Names (e.g. .DS_Store)', 'excl_file_names'),
+             'file_rel_paths': ('File Relative Paths', 'excl_file_rel_paths'),
+             'file_abs_paths': ('File Absolute Paths', 'excl_file_abs_paths'),
+             'folder_names': ('Folder Names (e.g. .git, node_modules)', 'excl_folder_names'),
+             'folder_rel_paths': ('Folder Relative Paths', 'excl_folder_rel_paths'),
+             'folder_abs_paths': ('Folder Absolute Paths', 'excl_folder_abs_paths'),
+             'extensions': ('Extensions (e.g. .pyc, .tmp)', 'excl_extensions')
+         }
+
+         for key, (label_text, attr_name) in config_mapping.items():
+             tk.Label(scrollable_frame, text=label_text, font=('Arial', 9, 'bold')).pack(anchor='w', padx=20)
+
+             # Safely fetch current values from the analyzer (stored as sets)
+             current_vals = []
+             if hasattr(self.core.analyzer, attr_name):
+                 current_vals = getattr(self.core.analyzer, attr_name)
+
+             v = tk.StringVar(value=", ".join(sorted(current_vals)))
+             self.exclusion_vars[key] = v
+             tk.Entry(scrollable_frame, textvariable=v, width=80).pack(padx=20, pady=(0, 10))
+
+         def save_config():
+             for key, var in self.exclusion_vars.items():
+                 items = [i.strip() for i in var.get().split(",") if i.strip()]
+                 # Update the analyzer using the key (category name)
+                 self.core.analyzer.update_exclusions(key, items, append=False)
+             messagebox.showinfo("Config", "Exclusion lists and analyzer settings updated.")
+
+         tk.Button(scrollable_frame, text="APPLY CONFIGURATION", bg="#e67e22", fg="white",
+                   font=('Arial', 10, 'bold'), command=save_config).pack(pady=20)
+
+         tk.Label(scrollable_frame, text="MoonspicAI v12.6.0-Resilience", font=('Arial', 12, 'italic')).pack(pady=10)
+         tk.Label(scrollable_frame, text="Security Audit: SHA-512 Enforcement Enabled").pack()
+
+         canvas.pack(side="left", fill="both", expand=True)
+         scrollbar.pack(side="right", fill="y")
+
+         root.mainloop()
+
+ def start():
+     app = GUI()
+     app.start()
+
+ if __name__ == "__main__":
+     start()
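For reference, the module above exposes a module-level `start()`; a minimal launch sketch, assuming the package is installed and the Python build includes tkinter:

```python
# Launch the packer GUI directly; blocks until the Tk main loop exits.
from codepacker.GUI import start

start()
```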
@@ -0,0 +1,4 @@
+ from .core import *
+ from .GUI import *
+ from .codeServices import *
+ from .utils import *
@@ -0,0 +1,153 @@
+ import os
+ import json
+ import zipfile
+ import hashlib
+ from pathlib import Path
+
+ class Feeder:
+     """
+     Responsible for segmenting the consolidated CODE.txt and providing
+     clean chunks for sequential LLM analysis or Agentic retrieval.
+     Enhanced to support Serial ID indexing and integrity verification.
+     """
+     def __init__(self, bundle_path):
+         """
+         Initializes the Feeder with a path to the generated BUNDLE.zip.
+         """
+         self.bundle_path = Path(bundle_path)
+         self.segments = []          # Sequential list for iteration
+         self.id_map = {}            # Map for direct ID access (F1, F2...)
+         self.stream_segments = []   # Array of dictionaries: {'code', 'fileID', 'relativepath'}
+         self.meta_data = {}
+         self.delimiter = "#MOONSPIC_CODEPACKER#"
+
+         if self.bundle_path.exists():
+             self._load_from_bundle()
+
+     def _load_from_bundle(self):
+         """
+         Extracts metadata and segments the CODE.txt directly from the ZIP bundle.
+         Uses line-by-line parsing to populate stream_segments.
+         """
+         try:
+             with zipfile.ZipFile(self.bundle_path, 'r') as z:
+                 # 1. Load Metadata
+                 if "META.json" in z.namelist():
+                     meta_content = z.read("META.json").decode('utf-8')
+                     self.meta_data = json.loads(meta_content)
+                     self.delimiter = self.meta_data.get("DELIMITER", self.delimiter)
+
+                 # 2. Load and Segment Code
+                 if "CODE.txt" in z.namelist():
+                     code_content = z.read("CODE.txt").decode('utf-8', errors='ignore')
+                     lines = code_content.splitlines()
+
+                     current_header = None
+                     current_code_lines = []
+
+                     def save_current_block():
+                         if not current_header:
+                             return
+
+                         code_string = "\n".join(current_code_lines).strip()
+
+                         # Handle MAP or File blocks
+                         if "PROJECT_MAP" in current_header:
+                             f_id = "MAP"
+                             rel_path = "PROJECT_MAP"
+                         else:
+                             # Parse: "# DELIMITER; ID; Length; Path"
+                             parts = [p.strip() for p in current_header.split(";")]
+                             f_id = parts[1] if len(parts) > 1 else "UNKNOWN"
+                             rel_path = parts[3] if len(parts) > 3 else "UNKNOWN"
+
+                         segment_dict = {
+                             'code': code_string,
+                             'fileID': f_id,
+                             'relativepath': rel_path
+                         }
+
+                         self.stream_segments.append(segment_dict)
+                         # Maintain legacy structures for backward compatibility
+                         full_segment = f"{current_header}\n{code_string}"
+                         self.segments.append(full_segment)
+                         self.id_map[f_id] = full_segment
+
+                     for line in lines:
+                         if line.startswith(self.delimiter) or line.startswith(f"# {self.delimiter}"):
+                             # Before starting a new block, save the previous one
+                             save_current_block()
+                             # Reset for the new block
+                             current_header = line
+                             current_code_lines = []
+                         else:
+                             if current_header:
+                                 current_code_lines.append(line)
+
+                     # Save the final block in the file
+                     save_current_block()
+
+         except Exception as e:
+             print(f"Error loading bundle for feeding: {e}")
+
+     def verify_segment_integrity(self, serial_id):
+         """
+         Checks if the segment in memory matches the SHA-512 and length
+         stored in the bundle's META.json.
+         """
+         if serial_id not in self.id_map:
+             return False, "ID not found"
+
+         segment = self.id_map[serial_id]
+         meta_hashes = self.meta_data.get("CODE_ID_TO_HASH", {})
+         meta_lengths = self.meta_data.get("CODE_ID_TO_LEN", {})
+
+         # Extract actual body content (skipping header line)
+         try:
+             body_content = segment.split("\n", 1)[1]
+             actual_len = len(body_content)
+             actual_hash = hashlib.sha512(body_content.encode('utf-8')).hexdigest()
+
+             expected_len = meta_lengths.get(serial_id)
+             expected_hash = meta_hashes.get(serial_id)
+
+             if actual_len == expected_len and actual_hash == expected_hash:
+                 return True, "Integrity Verified"
+             else:
+                 return False, f"Mismatch: Len({actual_len}/{expected_len}) Hash Match: {actual_hash == expected_hash}"
+         except Exception as e:
+             return False, str(e)
+
+     def get_meta_info(self):
+         """Returns the metadata dictionary for external reference."""
+         return self.meta_data
+
+     def get_next_segment(self):
+         """
+         Generator to yield segments one by one for sequential LLM processing.
+         """
+         total = len(self.segments)
+         for i, segment in enumerate(self.segments):
+             # Attempt to extract ID for metadata reporting
+             current_id = "UNKNOWN"
+             if "PROJECT_MAP" in segment:
+                 current_id = "MAP"
+             elif ";" in segment:
+                 try:
+                     current_id = segment.split(";")[1].strip()
+                 except IndexError: pass
+
+             yield {
+                 "index": i,
+                 "serial_id": current_id,
+                 "content": segment,
+                 "total": total,
+                 "is_map": "PROJECT_MAP" in segment
+             }
+
+     def get_segment_by_serial(self, serial_id):
+         """
+         Retrieves a specific segment by its Serial ID (e.g., 'F1', 'D5', 'MAP').
+         Essential for Agentic AI looking for specific file context.
+         """
+         return self.id_map.get(serial_id)
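A short usage sketch for `Feeder`, assuming a bundle already produced by `CodePacker.pack` (the `project_BUNDLE.zip` path is illustrative):

```python
from codepacker.codeServices import Feeder

feeder = Feeder("project_BUNDLE.zip")  # illustrative bundle path

# Stream segments sequentially, e.g. to feed an LLM one file at a time.
for seg in feeder.get_next_segment():
    tag = "(map)" if seg["is_map"] else seg["serial_id"]
    print(f"{seg['index'] + 1}/{seg['total']}: {tag}, {len(seg['content'])} chars")

# Random access by Serial ID, plus a SHA-512/length check against META.json.
segment = feeder.get_segment_by_serial("F1")
ok, detail = feeder.verify_segment_integrity("F1")
print(ok, detail)
```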
@@ -0,0 +1,240 @@
+ import os
+ import hashlib
+ import json
+ import uuid
+ import zipfile
+ import shutil
+ import io
+ from pathlib import Path
+ from collections import OrderedDict
+
+ try: from .utils import ProjectAnalyzer
+ except ImportError: from utils import ProjectAnalyzer
+
+ class CodePacker:
+     """
+     Advanced engine for consolidating code and assets with simplified Serial ID tracking.
+     Enhanced with character-length guarding, per-file hashing, and ID-mapped path dictionaries.
+     """
+     def __init__(self,
+                  exclude_filenames=None,
+                  exclude_rel_paths=None,
+                  exclude_abs_paths=None,
+                  exclude_foldernames=None,
+                  exclude_folder_rel_paths=None,
+                  exclude_folder_abs_paths=None,
+                  exclude_extensions=None):
+
+         # Initialize the ProjectAnalyzer
+         self.analyzer = ProjectAnalyzer(
+             exclude_filenames=exclude_filenames,
+             exclude_rel_paths=exclude_rel_paths,
+             exclude_abs_paths=exclude_abs_paths,
+             exclude_foldernames=exclude_foldernames,
+             exclude_folder_rel_paths=exclude_folder_rel_paths,
+             exclude_folder_abs_paths=exclude_folder_abs_paths,
+             exclude_extensions=exclude_extensions
+         )
+
+         self.delimiter = "#MOONSPIC_CODEPACKER#"
+         self.PathOfLastPack = None
+         self.PathOfLastUnpack = None
+
+         # Data structures for tracking
+         self.code_files_registry = OrderedDict()
+         self.asset_files_registry = OrderedDict()
+         self.folder_registry = OrderedDict()
+         self.asset_folder_registry = OrderedDict()
+
+         self.last_visual_tree = ""
+
+     def is_binary(self, file_path):
+         """Check if a file is binary by looking for null bytes."""
+         try:
+             with open(file_path, 'rb') as f:
+                 chunk = f.read(1024)
+                 if b'\x00' in chunk:
+                     return True
+                 chunk.decode('utf-8')
+                 return False
+         except (UnicodeDecodeError, PermissionError):
+             return True
+
+     def get_meta_description(self):
+         """Returns a dictionary describing each key used in META.json."""
+         return {
+             "PROJECT_NAME": "The name of the source directory being packed.",
+             "DELIMITER": "The unique string used to separate code blocks in CODE.txt.",
+             "CODE_FILES_ID": "A list of unique serial IDs (F1, F2...) assigned to text-based files.",
+             "FOLDER_IDS": "A list of unique serial IDs (D1, D2...) assigned to every folder.",
+             "ASSET_FILES_ID": "A list of unique serial IDs assigned to binary files stored in Assets.zip.",
+             "ASSET_FOLDERS_ID": "A list of IDs for folders that specifically contain asset files.",
+             "ABS_PATH_TREE": "Dictionary mapping every unique ID (File/Folder) to its absolute path.",
+             "REL_PATH_TREE": "Dictionary mapping every unique ID (File/Folder) to its relative path.",
+             "VISUAL_TREE": "A visual ASCII representation of the project structure.",
+             "CODE_ID_TO_LEN": "The exact character length of each code file for precise restoration.",
+             "CODE_ID_TO_HASH": "SHA-512 hash of each individual code file for integrity verification.",
+             "CONTENT_HASH": "The SHA-512 integrity hash of the project source."
+         }
+
+     def pack(self, src_path, out_dir):
+         if not src_path or not out_dir: return "Error: Missing paths"
+         src, out = Path(src_path).resolve(), Path(out_dir).resolve()
+
+         # Reset registries and counters
+         self.code_files_registry.clear()
+         self.asset_files_registry.clear()
+         self.folder_registry.clear()
+         self.asset_folder_registry.clear()
+
+         file_serial = 1
+         dir_serial = 1
+
+         # Capture visual tree and content hash
+         self.last_visual_tree = self.analyzer.build_hierarchy(src)
+         project_hash = self.analyzer.calculate_content_hash(src)
+
+         stage = out / f"STAGE_{uuid.uuid4().hex[:6]}"
+         stage.mkdir(parents=True, exist_ok=True)
+
+         code_blocks = [f"{self.delimiter} PROJECT_MAP\n{self.last_visual_tree}"]
+         assets_buffer = io.BytesIO()
+
+         # Master Path Dictionaries
+         abs_path_tree = {}
+         rel_path_tree = {}
+
+         with zipfile.ZipFile(assets_buffer, 'w') as az:
+             for r, dirs, files in os.walk(src):
+                 dirs.sort()
+                 files.sort()
+
+                 curr_dir_path = Path(r)
+                 if self.analyzer._is_excluded(curr_dir_path, src):
+                     continue
+
+                 dir_rel = str(curr_dir_path.relative_to(src))
+                 dir_id = f"D{dir_serial}"
+                 dir_serial += 1
+
+                 # Register directory in master path trees
+                 abs_path_tree[dir_id] = str(curr_dir_path)
+                 rel_path_tree[dir_id] = dir_rel
+                 self.folder_registry[dir_id] = {"abs": str(curr_dir_path), "rel": dir_rel}
+
+                 has_assets = False
+                 for f in files:
+                     fp = Path(r) / f
+                     if self.analyzer._is_excluded(fp, src): continue
+                     if "_BUNDLE.zip" in f or "STAGE_" in str(fp): continue
+
+                     rel_path = str(fp.relative_to(src))
+                     abs_path = str(fp.resolve())
+
+                     f_id = f"F{file_serial}"
+                     file_serial += 1
+
+                     # Register file in master path trees
+                     abs_path_tree[f_id] = abs_path
+                     rel_path_tree[f_id] = rel_path
+
+                     if not self.is_binary(fp):
+                         try:
+                             content = fp.read_text(encoding='utf-8', errors='ignore')
+                             c_len = len(content)
+                             file_hash = hashlib.sha512(content.encode('utf-8')).hexdigest()
+
+                             def_line = f"# {self.delimiter}; {f_id}; {c_len}; {rel_path}"
+                             code_blocks.append(f"{def_line}\n{content}")
+
+                             self.code_files_registry[f_id] = {
+                                 "abs": abs_path,
+                                 "rel": rel_path,
+                                 "len": c_len,
+                                 "hash": file_hash
+                             }
+                         except Exception:
+                             az.write(fp, rel_path)
+                             self.asset_files_registry[f_id] = {"abs": abs_path, "rel": rel_path}
+                             has_assets = True
+                     else:
+                         az.write(fp, rel_path)
+                         self.asset_files_registry[f_id] = {"abs": abs_path, "rel": rel_path}
+                         has_assets = True
+
+                 if has_assets:
+                     self.asset_folder_registry[dir_id] = self.folder_registry[dir_id]
+
+         meta = {
+             "PROJECT_NAME": src.name,
+             "DELIMITER": self.delimiter,
+             "CONTENT_HASH": project_hash,
+             "CODE_FILES_ID": list(self.code_files_registry.keys()),
+             "FOLDER_IDS": list(self.folder_registry.keys()),
+             "ASSET_FILES_ID": list(self.asset_files_registry.keys()),
+             "ASSET_FOLDERS_ID": list(self.asset_folder_registry.keys()),
+             "ABS_PATH_TREE": abs_path_tree,
+             "REL_PATH_TREE": rel_path_tree,
+             "VISUAL_TREE": self.last_visual_tree,
+             "CODE_ID_TO_LEN": {k: v["len"] for k, v in self.code_files_registry.items()},
+             "CODE_ID_TO_HASH": {k: v["hash"] for k, v in self.code_files_registry.items()},
+             "DESCRIPTION": self.get_meta_description()
+         }
+
+         (stage / "CODE.txt").write_text("\n".join(code_blocks), encoding='utf-8')
+         (stage / "META.json").write_text(json.dumps(meta, indent=4), encoding='utf-8')
+         (stage / "Assets.zip").write_bytes(assets_buffer.getvalue())
+
+         final_zip = out / f"{src.name}_BUNDLE.zip"
+         with zipfile.ZipFile(final_zip, 'w') as fz:
+             for f in ["CODE.txt", "META.json", "Assets.zip"]:
+                 fz.write(stage / f, f)
+
+         shutil.rmtree(stage)
+         self.PathOfLastPack = final_zip
+         return str(final_zip)
+
+     def unpack(self, zip_path, target_dir):
+         base_target = Path(target_dir).resolve()
+         temp = base_target / f"TEMP_{uuid.uuid4().hex[:6]}"
+         temp.mkdir(parents=True, exist_ok=True)
+
+         shutil.unpack_archive(zip_path, temp)
+         meta = json.loads((temp / "META.json").read_text(encoding='utf-8'))
+
+         proj_folder = base_target / meta.get("PROJECT_NAME", "restored_project")
+         if proj_folder.exists(): shutil.rmtree(proj_folder)
+         proj_folder.mkdir(parents=True, exist_ok=True)
+
+         if (temp / "Assets.zip").exists():
+             with zipfile.ZipFile(temp / "Assets.zip", 'r') as az:
+                 az.extractall(proj_folder)
+
+         if (temp / "CODE.txt").exists():
+             content = (temp / "CODE.txt").read_text(encoding='utf-8')
+             delim = meta.get("DELIMITER", self.delimiter)
+
+             parts = content.split(delim)
+             for part in parts:
+                 part = part.lstrip()
+                 if not part or part.startswith("PROJECT_MAP"): continue
+
+                 if part.startswith(";"):
+                     try:
+                         header_line, remaining_body = part.split("\n", 1)
+                         header_parts = [p.strip() for p in header_line.split(";")]
+
+                         expected_len = int(header_parts[2])
+                         rel_path = header_parts[3]
+
+                         actual_body = remaining_body[:expected_len]
+
+                         out_f = proj_folder / rel_path
+                         out_f.parent.mkdir(parents=True, exist_ok=True)
+                         out_f.write_text(actual_body, encoding='utf-8')
+                     except Exception:
+                         continue
+
+         shutil.rmtree(temp)
+         self.PathOfLastUnpack = proj_folder
+         return str(proj_folder)
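A round-trip sketch for `CodePacker`, assuming an existing `MyProject` directory (both paths are illustrative):

```python
from codepacker.core import CodePacker

packer = CodePacker()  # default exclusions (.git, __pycache__, .zip, ...)

bundle = packer.pack("MyProject", "out")         # -> out/MyProject_BUNDLE.zip
restored = packer.unpack(bundle, "out/restore")  # -> out/restore/MyProject

# The analyzer's deterministic SHA-512 should survive the cycle.
h1 = packer.analyzer.calculate_content_hash("MyProject")
h2 = packer.analyzer.calculate_content_hash(restored)
print("integrity:", h1 == h2)
```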
@@ -0,0 +1,231 @@
+ import os
+ import shutil
+ import json
+ import zipfile
+ import unittest
+ import argparse
+ import sys
+ from pathlib import Path
+
+ # Import project components
+ try: from .core import CodePacker
+ except ImportError: from core import CodePacker
+
+ try: from .codeServices import Feeder
+ except ImportError: from codeServices import Feeder
+
+ class TestMoonspicSystem(unittest.TestCase):
+     @classmethod
+     def setUpClass(cls):
+         """Create a dedicated working area for the entire test suite."""
+         cls.working_area = Path("working_area").resolve()
+         if cls.working_area.exists():
+             shutil.rmtree(cls.working_area)
+         cls.working_area.mkdir(parents=True)
+
+     def setUp(self):
+         """Set up an isolated project structure for each test case within the working area."""
+         self.test_dir = self.working_area / self._testMethodName
+         if self.test_dir.exists():
+             shutil.rmtree(self.test_dir)
+         self.test_dir.mkdir()
+
+         # CRITICAL: Keep Project and Output strictly separated
+         self.project_dir = self.test_dir / "SourceProject"
+         self.project_dir.mkdir()
+
+         (self.project_dir / "main.py").write_text("print('hello world')", encoding='utf-8')
+         (self.project_dir / "utils.js").write_text("console.log('test');", encoding='utf-8')
+         (self.project_dir / "README.md").write_text("# Mock Project\nThis is for testing.", encoding='utf-8')
+
+         asset_dir = self.project_dir / "assets"
+         asset_dir.mkdir()
+         (asset_dir / "image.png").write_bytes(os.urandom(1024))
+
+         self.output_dir = self.test_dir / "bundle_output"
+         self.output_dir.mkdir()
+
+     def test_analyzer_hash_consistency(self):
+         """Test if the ProjectAnalyzer generates the same hash and produces output files."""
+         mock_files = list(self.project_dir.rglob('*'))
+         self.assertTrue(len(mock_files) > 0, "Test logic error: Mock source directory is empty.")
+
+         packer = CodePacker()
+         hash1 = packer.analyzer.calculate_content_hash(self.project_dir)
+         bundle_path = packer.pack(self.project_dir, self.output_dir)
+
+         output_contents = list(self.output_dir.glob('*'))
+         self.assertTrue(len(output_contents) > 0, "Output directory is empty after pack operation.")
+         self.assertTrue(os.path.exists(bundle_path), f"Bundle file {bundle_path} was not created.")
+
+         hash2 = packer.analyzer.calculate_content_hash(self.project_dir)
+         self.assertTrue(hash1 and len(hash1) > 0, "Analyzer returned an empty hash.")
+         self.assertEqual(hash1, hash2, "Deterministic hashing failed: hashes should be identical.")
+
+     def test_packer_meta_format(self):
+         """Verify that META.json uses all CAPS keys and includes the descriptive dictionary."""
+         packer = CodePacker()
+         bundle_path = Path(packer.pack(self.project_dir, self.output_dir))
+
+         with zipfile.ZipFile(bundle_path, 'r') as z:
+             meta_content = z.read("META.json").decode('utf-8')
+             meta_data = json.loads(meta_content)
+
+         required_keys = [
+             "PROJECT_NAME", "DELIMITER", "CODE_FILES_ID",
+             "FOLDER_IDS", "CONTENT_HASH", "DESCRIPTION"
+         ]
+         for key in required_keys:
+             self.assertIn(key, meta_data, f"Missing required CAPS key: {key}")
+
+         self.assertIsInstance(meta_data["DESCRIPTION"], dict)
+
+     def test_packer_header_format(self):
+         """Verify the definition line format in CODE.txt matches the specification."""
+         packer = CodePacker()
+         bundle_path = Path(packer.pack(self.project_dir, self.output_dir))
+
+         with zipfile.ZipFile(bundle_path, 'r') as z:
+             code_text = z.read("CODE.txt").decode('utf-8')
+             # Updated expectation: Look for the delimiter followed by the new Serial ID prefix 'F'
+             # Instead of 'CODE_', we now use 'F1', 'F2', etc.
+             expected_prefix = f"# {packer.delimiter}; F"
+             self.assertIn(expected_prefix, code_text, "CODE.txt does not contain correctly formatted header lines.")
+
+     def test_integrity_match(self):
+         """Test the full cycle: Pack -> Unpack -> Compare Hashes."""
+         packer = CodePacker()
+
+         # 1. Calculate Original Hash
+         h1 = packer.analyzer.calculate_content_hash(self.project_dir)
+
+         # 2. Pack to bundle_output/
+         bundle = packer.pack(self.project_dir, self.output_dir)
+
+         # 3. Unpack to restored_site/
+         restoration_path = self.test_dir / "restored_site"
+         restoration_path.mkdir()
+         restored_dir = packer.unpack(bundle, restoration_path)
+
+         # 4. Calculate Restored Hash
+         h2 = packer.analyzer.calculate_content_hash(restored_dir)
+
+         # Debugging info if it fails
+         if h1 != h2:
+             print("\n--- Integrity Debug ---")
+             print(f"Original Dir: {self.project_dir}")
+             print(f"Restored Dir: {restored_dir}")
+             orig_files = sorted([str(p.relative_to(self.project_dir)) for p in self.project_dir.rglob('*')])
+             rest_files = sorted([str(p.relative_to(restored_dir)) for p in Path(restored_dir).rglob('*')])
+             print(f"Original Files: {len(orig_files)}")
+             print(f"Restored Files: {len(rest_files)}")
+             if orig_files != rest_files:
+                 print(f"File list mismatch! Missing/Extra: {set(orig_files) ^ set(rest_files)}")
+
+         self.assertEqual(h1, h2, "Integrity Mismatch: The restored project content has been altered.")
+
+     def test_feeder_segmentation(self):
+         """Verify the Feeder correctly parses the bundle and yields project segments."""
+         packer = CodePacker()
+         bundle = packer.pack(self.project_dir, self.output_dir)
+
+         feeder = Feeder(bundle)
+         segments = list(feeder.get_next_segment())
+
+         self.assertEqual(len(segments), 4, f"Expected 4 segments, found {len(segments)}")
+         self.assertTrue(segments[0]["is_map"], "The first segment yielded should be the PROJECT_MAP.")
+
+
+     def test_feeder_stream_segments(self):
+         """
+         Refactored test: Verifies stream_segments logic AND outputs each segment
+         to a physical file in the 'bundle_output' directory for inspection.
+         """
+         bundle_path = self.output_dir / "manual_test_bundle.zip"
+         delimiter = "#MOONSPIC_CODEPACKER#"
+
+         # 1. PREPARE MOCK DATA
+         meta = {
+             "DELIMITER": delimiter,
+             "PROJECT_NAME": "ManualFeederCheck",
+             "CODE_ID_TO_LEN": {"F1": 21, "F2": 25},
+             "CODE_ID_TO_HASH": {"F1": "hash1", "F2": "hash2"}
+         }
+
+         code_content = (
+             f"{delimiter} PROJECT_MAP\n"
+             "|-- app.py\n"
+             "|-- utils.py\n"
+             f"# {delimiter}; F1; 21; app.py\n"
+             "print('hello world')\n"
+             f"# {delimiter}; F2; 25; utils.py\n"
+             "def add(a, b): return a+b"
+         )
+
+         with zipfile.ZipFile(bundle_path, 'w') as z:
+             z.writestr("META.json", json.dumps(meta))
+             z.writestr("CODE.txt", code_content)
+
+         # 2. RUN FEEDER
+         feeder = Feeder(bundle_path)
+
+         # 3. SAVE SEGMENTS TO FILES FOR MANUAL CHECK
+         # Path: working_area/test_feeder_stream_segments/bundle_output/extracted_segments/
+         extract_dir = self.output_dir / "extracted_segments"
+         extract_dir.mkdir(exist_ok=True)
+
+         print(f"\n--- Exporting {len(feeder.stream_segments)} segments to {extract_dir} ---")
+
+         for idx, segment in enumerate(feeder.stream_segments):
+             f_id = segment['fileID']
+             # Create a filename: e.g., "0_MAP.txt", "1_F1.txt"
+             filename = f"{idx}_{f_id}.txt"
+             file_path = extract_dir / filename
+             file_path.write_text(segment['code'], encoding='utf-8')
+             print(f"   Saved: {filename}")
+
+         # 4. ASSERTIONS
+         self.assertEqual(len(feeder.stream_segments), 3, "Should have 3 segments: MAP, F1, and F2")
+         self.assertEqual(feeder.stream_segments[0]['fileID'], "MAP")
+         self.assertEqual(feeder.stream_segments[1]['fileID'], "F1")
+         self.assertEqual(feeder.stream_segments[2]['fileID'], "F2")
+
+         print(f"\n✅ SUCCESS: Inspect the '{extract_dir}' folder for results.")
+
+
+ def list_tests(suite):
+     test_methods = []
+     for test in suite:
+         if isinstance(test, unittest.TestSuite):
+             test_methods.extend(list_tests(test))
+         else:
+             test_methods.append(test._testMethodName)
+     return test_methods
+
+ if __name__ == "__main__":
+     parser = argparse.ArgumentParser(description="Moonspic System Test Runner")
+     parser.add_argument("-n", "--number", type=int, help="Run a specific test by its index number")
+     parser.add_argument("-l", "--list", action="store_true", help="List all available tests with their index numbers")
+
+     args = parser.parse_args()
+
+     loader = unittest.TestLoader()
+     suite = loader.loadTestsFromTestCase(TestMoonspicSystem)
+     all_test_names = list_tests(suite)
+
+     if args.list:
+         print("\nAvailable Tests:")
+         for idx, name in enumerate(all_test_names, 1):
+             print(f"  [{idx}] {name}")
+         sys.exit(0)
+
+     if args.number is not None:
+         if 1 <= args.number <= len(all_test_names):
+             test_name = all_test_names[args.number - 1]
+             specific_suite = loader.loadTestsFromName(f"{__name__}.TestMoonspicSystem.{test_name}")
+             unittest.TextTestRunner(verbosity=2).run(specific_suite)
+         else:
+             print("❌ Error: Invalid test number.")
+             sys.exit(1)
+     else:
+         unittest.TextTestRunner(verbosity=2).run(suite)
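The suite above doubles as a small CLI (`-l` lists tests, `-n <index>` runs one); it can also be driven programmatically, as in this sketch (assumes `codepacker` is importable):

```python
import unittest
from codepacker.tests import TestMoonspicSystem

# Equivalent to running `python tests.py` with no arguments.
suite = unittest.TestLoader().loadTestsFromTestCase(TestMoonspicSystem)
unittest.TextTestRunner(verbosity=2).run(suite)
```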
@@ -0,0 +1,177 @@
+ import os
+ import hashlib
+ from pathlib import Path
+
+ class ProjectAnalyzer:
+     """
+     Utility class to analyze and visualize project structures with granular exclusions.
+     Designed to provide deterministic hashing for integrity checks.
+     """
+
+     def __init__(self,
+                  exclude_filenames=None,
+                  exclude_rel_paths=None,
+                  exclude_abs_paths=None,
+                  exclude_foldernames=None,
+                  exclude_folder_rel_paths=None,
+                  exclude_folder_abs_paths=None,
+                  exclude_extensions=None):
+         """
+         Initializes the analyzer with specific exclusion criteria.
+         """
+         # 1. File Exclusions (Standardized System Ghost Files)
+         self.excl_file_names = set(exclude_filenames or {
+             '.DS_Store', 'Thumbs.db', '.gitignore', '.gitattributes', 'desktop.ini',
+             '.python-version', 'META.json', 'CODE.txt'  # Added tool artifacts
+         })
+         self.excl_file_rel_paths = set(exclude_rel_paths or [])
+         self.excl_file_abs_paths = set(exclude_abs_paths or [])
+
+         # 2. Folder Exclusions (Standardized System/Tool Artifacts)
+         self.excl_folder_names = set(exclude_foldernames or {
+             '.git', '__pycache__', '.venv', 'node_modules', '.idea', '.vscode',
+             '.pytest_cache', 'env', 'venv', 'target', 'dist', 'build'
+         })
+         # Explicitly exclude common test/output directories to avoid "Self-Hashing"
+         self.excl_folder_rel_paths = set(exclude_folder_rel_paths or {
+             'bundle_output', 'working_area', 'output', 'restored_site', 'restored_project'
+         })
+         self.excl_folder_abs_paths = set(exclude_folder_abs_paths or [])
+
+         # 3. Extension Exclusions
+         self.excl_extensions = set(exclude_extensions or {
+             '.pyc', '.pyo', '.pyd', '.tmp', '.log', '.bak', '.zip', '.tar', '.gz'
+         })
+
+     def update_exclusions(self, category, items, append=True):
+         """
+         Updates the exclusion lists dynamically.
+         """
+         attr_map = {
+             'file_names': 'excl_file_names',
+             'file_rel_paths': 'excl_file_rel_paths',
+             'file_abs_paths': 'excl_file_abs_paths',
+             'folder_names': 'excl_folder_names',
+             'folder_rel_paths': 'excl_folder_rel_paths',
+             'folder_abs_paths': 'excl_folder_abs_paths',
+             'extensions': 'excl_extensions'
+         }
+
+         if category in attr_map:
+             target_attr = attr_map[category]
+             new_set = set(items)
+             if append:
+                 current_set = getattr(self, target_attr)
+                 setattr(self, target_attr, current_set.union(new_set))
+             else:
+                 setattr(self, target_attr, new_set)
+
+     def _is_excluded(self, path, root_path):
+         """
+         Internal check to determine if a file or folder should be excluded.
+         """
+         path = Path(path)
+         root_path = Path(root_path)
+         abs_path = str(path.resolve())
+         try:
+             rel_path_obj = path.relative_to(root_path)
+             rel_path = str(rel_path_obj)
+             rel_parts = rel_path_obj.parts
+         except ValueError:
+             rel_path = ""
+             rel_parts = []
+
+         # Check if any part of the path (parent folders) is in an exclusion list
+         for part in rel_parts:
+             if part in self.excl_folder_names or part in self.excl_folder_rel_paths:
+                 return True
+
+         if path.is_dir():
+             if path.name in self.excl_folder_names: return True
+             if rel_path in self.excl_folder_rel_paths: return True
+             if abs_path in self.excl_folder_abs_paths: return True
+         else:
+             if path.name in self.excl_file_names: return True
+             if rel_path in self.excl_file_rel_paths: return True
+             if abs_path in self.excl_file_abs_paths: return True
+             if path.suffix.lower() in self.excl_extensions: return True
+
+         return False
+
+     def calculate_content_hash(self, directory, debug=False):
+         """
+         Generates a SHA-512 hash of the directory content.
+         Ensures the root directory name itself does not affect the hash.
+         """
+         hasher = hashlib.sha512()
+         base = Path(directory).resolve()
+
+         files = []
+         for fp in base.rglob('*'):
+             # Only hash files; directory structure is captured by relative paths
+             if fp.is_file() and not self._is_excluded(fp, base):
+                 files.append(fp)
+
+         # Sort files by POSIX relative path for absolute determinism.
+         # This ensures 'restored_site/a.txt' and 'MockProject/a.txt'
+         # both use 'a.txt' as the key.
+         sorted_files = sorted(files, key=lambda x: x.relative_to(base).as_posix())
+
+         if debug:
+             print(f"\n--- Hashing Debug for Root: {base.name} ---")
+
+         for fp in sorted_files:
+             try:
+                 # Force forward slashes and ensure the root name isn't part of this string
+                 rel_path_string = fp.relative_to(base).as_posix()
+
+                 if debug:
+                     print(f"Hashing Path: {rel_path_string}")
+
+                 # Update hash with the path
+                 hasher.update(rel_path_string.encode('utf-8'))
+
+                 # Update hash with the content
+                 with open(fp, 'rb') as f:
+                     while chunk := f.read(8192):
+                         hasher.update(chunk)
+             except (PermissionError, OSError) as e:
+                 if debug:
+                     print(f"Skipping {fp}: {e}")
+                 continue
+
+         return hasher.hexdigest()
+
+     def build_hierarchy(self, root_path, current_path=None, indent="", is_last=True, prefix=""):
+         """
+         Recursively builds a visual tree string of the directory structure.
+         """
+         if current_path is None:
+             current_path = Path(root_path)
+             root_path = current_path
+
+         path = Path(current_path)
+
+         if self._is_excluded(path, root_path) and prefix != "":
+             return ""
+
+         if prefix == "":
+             output = f"{path.name}/\n"
+         else:
+             output = f"{indent}{prefix} {path.name}{'/' if path.is_dir() else ''}\n"
+
+         if path.is_dir():
+             try:
+                 items = [item for item in path.iterdir() if not self._is_excluded(item, root_path)]
+                 items.sort(key=lambda x: (not x.is_dir(), x.name.lower()))
+             except PermissionError:
+                 return f"{indent}{prefix} [Permission Denied]\n"
+
+             for i, item in enumerate(items):
+                 last_item = (i == len(items) - 1)
+                 new_prefix = "└──" if last_item else "├──"
+                 new_indent = indent + ("    " if is_last else "│   ")
+                 pass_indent = "" if prefix == "" else new_indent
+                 output += self.build_hierarchy(root_path, item, pass_indent, last_item, new_prefix)
+
+         return output
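A standalone sketch of the `ProjectAnalyzer` primitives used throughout the package (the `MyProject` path is illustrative):

```python
from codepacker.utils import ProjectAnalyzer

analyzer = ProjectAnalyzer()

# Extend one exclusion category at runtime; append=False would replace it.
analyzer.update_exclusions('extensions', ['.cache'], append=True)

print(analyzer.build_hierarchy("MyProject"))        # ASCII project tree
print(analyzer.calculate_content_hash("MyProject")) # deterministic SHA-512
```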
@@ -0,0 +1,8 @@
+ Metadata-Version: 2.4
+ Name: codepacker
+ Version: 12.6.0
+ Summary: Enhanced Updates.
+ Author: BMI-MoonspicAI
+ Description-Content-Type: text/markdown
+
+ # Moonspic Codepacker
@@ -0,0 +1,13 @@
+ README.md
+ pyproject.toml
+ src/codepacker/GUI.py
+ src/codepacker/__init__.py
+ src/codepacker/codeServices.py
+ src/codepacker/core.py
+ src/codepacker/tests.py
+ src/codepacker/utils.py
+ src/codepacker.egg-info/PKG-INFO
+ src/codepacker.egg-info/SOURCES.txt
+ src/codepacker.egg-info/dependency_links.txt
+ src/codepacker.egg-info/entry_points.txt
+ src/codepacker.egg-info/top_level.txt
@@ -0,0 +1,2 @@
+ [console_scripts]
+ codepacker-gui = codepacker.GUI:start
@@ -0,0 +1 @@
+ codepacker