ft-hubspot-workflow-backup 0.1.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,14 @@
+ from .client import HubSpotClient
+ from .backup import backup_all_flows, get_timestamp, slugify, verify_backups
+ from .restore import restore_flow
+
+ __version__ = "0.1.4"
+
+ __all__ = [
+     "HubSpotClient",
+     "backup_all_flows",
+     "restore_flow",
+     "verify_backups",
+     "get_timestamp",
+     "slugify",
+ ]
@@ -0,0 +1,315 @@
+ import argparse
+ import hashlib
+ import json
+ import re
+ import sys
+ from datetime import datetime, timezone
+ from pathlib import Path
+ from typing import Optional, Union
+
+ import requests
+
+ from .client import HubSpotClient
+
+
+ def get_filter_sort_key(f: dict) -> tuple:
+     """Get a sort key for a filter based on property, type, and value."""
+     operation = f.get("operation", {})
+     op_value = operation.get("value", "") or str(operation.get("values", []))
+     return (
+         f.get("property", ""),
+         f.get("filterType", ""),
+         op_value,
+     )
+
+
+ def get_filter_branch_sort_key(branch: dict) -> tuple:
+     """Get a sort key for a filter branch based on its first filter."""
+     filters = branch.get("filters", [])
+     if filters:
+         first_filter = filters[0]
+         operation = first_filter.get("operation", {})
+         op_value = operation.get("value", "") or str(operation.get("values", []))
+         return (
+             first_filter.get("property", ""),
+             first_filter.get("filterType", ""),
+             op_value,
+         )
+     return ("", "", "")
+
+
+ def sort_filters(obj: Union[dict, list]) -> Union[dict, list]:
+     """Recursively sort filters and filter branches for consistent output."""
+     if isinstance(obj, dict):
+         for key, value in obj.items():
+             if key == "filters" and isinstance(value, list):
+                 obj[key] = sorted(value, key=get_filter_sort_key)
+             elif key == "reEnrollmentTriggersFilterBranches" and isinstance(
+                 value, list
+             ):
+                 for branch in value:
+                     sort_filters(branch)
+                 obj[key] = sorted(value, key=get_filter_branch_sort_key)
+             else:
+                 sort_filters(value)
+     elif isinstance(obj, list):
+         for item in obj:
+             sort_filters(item)
+     return obj
+
+
+ def normalize_flow(flow: dict) -> dict:
+     """Normalize flow data for consistent serialization."""
+     if "dataSources" in flow and isinstance(flow["dataSources"], list):
+         flow["dataSources"] = sorted(
+             flow["dataSources"],
+             key=lambda ds: (
+                 ds.get("objectTypeId", ""),
+                 ds.get("associationTypeId", 0),
+                 ds.get("name", ""),
+             ),
+         )
+     sort_filters(flow)
+     return flow
+
+
+ def slugify(name: str, max_length: int = 80) -> str:
+     """
+     Convert flow name to filesystem-safe slug.
+
+     Args:
+         name: Flow name to slugify.
+         max_length: Maximum slug length.
+
+     Returns:
+         Lowercase slug with only alphanumeric, dash, underscore.
+     """
+     if not name:
+         return "unnamed-flow"
+     slug = name.lower()
+     slug = re.sub(r"\s+", "-", slug)
+     slug = re.sub(r"[^a-z0-9_-]", "", slug)
+     if not slug:
+         slug = "unnamed-flow"
+     if len(slug) > max_length:
+         slug = slug[:max_length]
+     return slug
+
+
+ def get_timestamp() -> str:
+     """
+     Get current UTC timestamp for backup naming.
+
+     Returns:
+         Timestamp string in YYYY_MM_DD_HHMMSS format.
+     """
+     return datetime.now(timezone.utc).strftime("%Y_%m_%d_%H%M%S")
+
+
+ def backup_all_flows(
+     token: Optional[str] = None,
+     output_dir: Optional[Union[str, Path]] = None,
+     client: Optional[HubSpotClient] = None,
+     use_date_dir: bool = False,
+     use_date_prefix: bool = False,
+ ) -> Path:
+     """
+     Backup all HubSpot automation flows to JSON files.
+
+     Args:
+         token: HubSpot token. Falls back to HUBSPOT_AUTOMATION_TOKEN env var.
+         output_dir: Directory for snapshots. Defaults to ./snapshots/.
+         client: Pre-configured HubSpotClient instance.
+         use_date_dir: If True, create a timestamped subdirectory for this run.
+         use_date_prefix: If True, prefix each workflow filename with timestamp.
+
+     Returns:
+         Path to the created snapshot directory.
+     """
+     if client is None:
+         client = HubSpotClient(token=token)
+
+     timestamp = get_timestamp()
+
+     if output_dir is None:
+         output_dir = Path.cwd() / "snapshots"
+     else:
+         output_dir = Path(output_dir)
+
+     if use_date_dir:
+         run_dir = output_dir / timestamp
+     else:
+         run_dir = output_dir
+     run_dir.mkdir(parents=True, exist_ok=True)
+
+     flows = client.list_flows()
+
+     if not flows:
+         return run_dir
+
+     index_entries = []
+
+     for flow in flows:
+         flow_id = str(flow.get("id"))
+         name = flow.get("name") or f"flow-{flow_id}"
+         slug = slugify(name)
+
+         try:
+             details = client.get_flow(flow_id)
+         except requests.exceptions.HTTPError:
+             continue
+
+         details = normalize_flow(details)
+
+         if use_date_prefix:
+             filename = f"{timestamp}_{slug}.json"
+         else:
+             filename = f"{slug}.json"
+         filepath = run_dir / filename
+
+         with filepath.open("w", encoding="utf-8") as f:
+             json.dump(details, f, indent=2, sort_keys=True)
+
+         index_entries.append({
+             "id": flow_id,
+             "name": name,
+             "filename": filename,
+             "isEnabled": details.get("isEnabled"),
+             "flowType": details.get("flowType"),
+             "type": details.get("type"),
+         })
+
+     for entry in index_entries:
+         filepath = run_dir / entry["filename"]
+         content = filepath.read_bytes()
+         entry["hash"] = hashlib.sha256(content).hexdigest()
+
+     index_path = run_dir / "_index.json"
+     with index_path.open("w", encoding="utf-8") as f:
+         json.dump({
+             "timestamp": timestamp,
+             "flows": index_entries,
+         }, f, indent=2)
+
+     return run_dir
+
+
+ def verify_backups(snapshot_dir: Optional[Union[str, Path]] = None) -> dict:
+     """
+     Verify all workflow backups against their stored SHA-256 hashes.
+
+     Args:
+         snapshot_dir: Directory containing snapshots. Defaults to ./snapshots/.
+
+     Returns:
+         Dict with 'verified', 'failed', and 'missing' lists of filenames.
+     """
+     if snapshot_dir is None:
+         snapshot_path = Path.cwd() / "snapshots"
+     else:
+         snapshot_path = Path(snapshot_dir)
+
+     index_path = snapshot_path / "_index.json"
+     if not index_path.exists():
+         raise FileNotFoundError(f"Index file not found: {index_path}")
+
+     with index_path.open("r", encoding="utf-8") as f:
+         index = json.load(f)
+
+     results: dict = {"verified": [], "failed": [], "missing": []}
+
+     for flow in index.get("flows", []):
+         filename = flow.get("filename")
+         expected_hash = flow.get("hash")
+
+         if not filename or not expected_hash:
+             continue
+
+         filepath = snapshot_path / filename
+         if not filepath.exists():
+             results["missing"].append(filename)
+             continue
+
+         actual_hash = hashlib.sha256(filepath.read_bytes()).hexdigest()
+         if actual_hash == expected_hash:
+             results["verified"].append(filename)
+         else:
+             results["failed"].append(filename)
+
+     return results
+
+
+ def main() -> None:
+     """CLI entry point for workflows-backup command."""
+     parser = argparse.ArgumentParser(
+         description="Backup HubSpot automation workflows to JSON files."
+     )
+     parser.add_argument(
+         "-o", "--output-dir",
+         type=str,
+         default=None,
+         help="Output directory for snapshots (default: ./snapshots/)"
+     )
+     parser.add_argument(
+         "--use-date-dir",
+         action="store_true",
+         help="Create a timestamped subdirectory for this backup run"
+     )
+     parser.add_argument(
+         "--use-date-prefix",
+         action="store_true",
+         help="Prefix each workflow filename with a timestamp"
+     )
+     parser.add_argument(
+         "--verify",
+         action="store_true",
+         help="Verify backup integrity after completion"
+     )
+     args = parser.parse_args()
+
+     try:
+         client = HubSpotClient()
+     except ValueError as e:
+         print(f"Error: {e}", file=sys.stderr)
+         sys.exit(1)
+
+     print("Listing all HubSpot automation flows (v4)...\n")
+     flows = client.list_flows()
+
+     if not flows:
+         print("No flows returned.")
+         return
+
+     print(f"Total flows returned: {len(flows)}")
+     for flow in flows:
+         print(f"{flow.get('id')}: {flow.get('name')}")
+
+     print("\nBacking up flows...")
+     run_dir = backup_all_flows(
+         client=client,
+         output_dir=args.output_dir,
+         use_date_dir=args.use_date_dir,
+         use_date_prefix=args.use_date_prefix,
+     )
+
+     print(f"\nBackup complete.")
+     print(f"Files saved to: {run_dir}")
+
+     if args.verify:
+         print("\nVerifying backup integrity...")
+         results = verify_backups(run_dir)
+         print(f" Verified: {len(results['verified'])}")
+         if results["failed"]:
+             print(f" Failed: {len(results['failed'])}")
+             for f in results["failed"]:
+                 print(f" - {f}")
+             sys.exit(1)
+         if results["missing"]:
+             print(f" Missing: {len(results['missing'])}")
+             for f in results["missing"]:
+                 print(f" - {f}")
+         print("All backups verified successfully.")
+
+
+ if __name__ == "__main__":
+     main()
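
For orientation, the `_index.json` that `backup_all_flows()` above writes pairs each workflow file with a SHA-256 digest. A minimal sketch of reading it back and re-checking entries by hand, assuming the default `snapshots/` layout (file names and hashes will differ per portal):

```python
# Illustrative sketch only: read the _index.json produced by backup_all_flows()
# and re-verify each listed file manually. Paths assume the default ./snapshots/ layout.
import hashlib
import json
from pathlib import Path

snapshot_dir = Path("snapshots")
index = json.loads((snapshot_dir / "_index.json").read_text(encoding="utf-8"))

# index["flows"] entries carry: id, name, filename, isEnabled, flowType, type, hash
for entry in index["flows"]:
    actual = hashlib.sha256((snapshot_dir / entry["filename"]).read_bytes()).hexdigest()
    status = "OK" if actual == entry["hash"] else "HASH MISMATCH"
    print(f'{entry["filename"]}: {status}')
```

This is the same comparison `verify_backups()` performs, shown here only to make the index layout concrete.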
@@ -0,0 +1,86 @@
+ import os
+ from typing import Optional
+
+ import requests
+
+
+ class HubSpotClient:
+     """Client for HubSpot Automation v4 API."""
+
+     BASE_URL = "https://api.hubapi.com"
+
+     def __init__(self, token: Optional[str] = None):
+         """
+         Initialize client.
+
+         Args:
+             token: HubSpot private app token. Falls back to HUBSPOT_AUTOMATION_TOKEN env var.
+         """
+         self.token = token or os.getenv("HUBSPOT_AUTOMATION_TOKEN")
+         if not self.token:
+             raise ValueError(
+                 "HubSpot token required. Pass token= or set HUBSPOT_AUTOMATION_TOKEN env var."
+             )
+         self._headers = {
+             "Authorization": f"Bearer {self.token}",
+             "Content-Type": "application/json",
+         }
+
+     def list_flows(self) -> list:
+         """
+         List all automation flows with pagination.
+
+         Returns:
+             List of flow summary dicts.
+         """
+         url = f"{self.BASE_URL}/automation/v4/flows"
+         params = {"limit": 100}
+         flows: list = []
+
+         while True:
+             resp = requests.get(url, headers=self._headers, params=params, timeout=30)
+             resp.raise_for_status()
+
+             data = resp.json()
+             batch = data.get("results", [])
+             flows.extend(batch)
+
+             paging = data.get("paging") or {}
+             next_page = paging.get("next") if isinstance(paging, dict) else None
+             after = next_page.get("after") if isinstance(next_page, dict) else None
+             if not after:
+                 break
+             params["after"] = after
+
+         return flows
+
+     def get_flow(self, flow_id: str) -> dict:
+         """
+         Get full details of a flow.
+
+         Args:
+             flow_id: HubSpot flow ID.
+
+         Returns:
+             Flow details dict.
+         """
+         url = f"{self.BASE_URL}/automation/v4/flows/{flow_id}"
+         resp = requests.get(url, headers=self._headers, timeout=30)
+         resp.raise_for_status()
+         return resp.json()
+
+     def update_flow(self, flow_id: str, body: dict) -> dict:
+         """
+         Update a flow configuration.
+
+         Args:
+             flow_id: HubSpot flow ID.
+             body: Flow configuration to apply.
+
+         Returns:
+             Updated flow dict.
+         """
+         url = f"{self.BASE_URL}/automation/v4/flows/{flow_id}"
+         resp = requests.put(url, headers=self._headers, json=body, timeout=30)
+         resp.raise_for_status()
+         return resp.json()
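
The client above needs only a private app token, passed as `token=` or taken from the `HUBSPOT_AUTOMATION_TOKEN` environment variable. A minimal sketch of driving it directly, outside the bundled CLIs (the flow picked here is arbitrary):

```python
# Illustrative sketch only: use HubSpotClient directly instead of the CLI entry points.
import json

from ft_hubspot_workflow_backup import HubSpotClient

client = HubSpotClient()  # raises ValueError if no token is passed or found in the env

flows = client.list_flows()  # follows paging.next.after until all pages are fetched
print(f"{len(flows)} flows found")

if flows:
    details = client.get_flow(str(flows[0]["id"]))
    print(json.dumps(details, indent=2)[:500])  # preview the raw flow payload
```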
@@ -0,0 +1,327 @@
+ import argparse
+ import json
+ import re
+ import sys
+ from pathlib import Path
+ from typing import Optional, Union
+
+ from .client import HubSpotClient
+
+
+ def build_datasource_mapping(backup_datasources: list, target_datasources: list) -> dict:
+     """
+     Build mapping between backup and target datasource IDs.
+
+     Args:
+         backup_datasources: Datasources from backup JSON.
+         target_datasources: Datasources from current flow.
+
+     Returns:
+         Dict mapping old fetched_object IDs to new IDs.
+     """
+     def ds_key(ds):
+         return (ds.get("objectTypeId"), ds.get("associationTypeId"), ds.get("type"))
+
+     def extract_id(name):
+         match = re.search(r'fetched_object_(\d+)', name)
+         return match.group(1) if match else None
+
+     target_by_key = {}
+     for ds in target_datasources:
+         key = ds_key(ds)
+         ds_id = extract_id(ds.get("name", ""))
+         if ds_id and key not in target_by_key:
+             target_by_key[key] = ds_id
+
+     mapping = {}
+     for ds in backup_datasources:
+         key = ds_key(ds)
+         backup_id = extract_id(ds.get("name", ""))
+         if backup_id and key in target_by_key:
+             target_id = target_by_key[key]
+             if backup_id != target_id:
+                 mapping[backup_id] = target_id
+
+     return mapping
+
+
+ def remap_fetched_objects(obj, mapping: dict):
+     """
+     Recursively remap fetched_object references in a data structure.
+
+     Args:
+         obj: Data structure (str, dict, list, or other).
+         mapping: Dict mapping old IDs to new IDs.
+
+     Returns:
+         Data structure with remapped references.
+     """
+     if not mapping:
+         return obj
+     if isinstance(obj, str):
+         result = obj
+         for old_id, new_id in mapping.items():
+             result = result.replace(f"fetched_object_{old_id}", f"fetched_object_{new_id}")
+         return result
+     elif isinstance(obj, dict):
+         return {k: remap_fetched_objects(v, mapping) for k, v in obj.items()}
+     elif isinstance(obj, list):
+         return [remap_fetched_objects(item, mapping) for item in obj]
+     return obj
+
+
+ def renumber_actions(backup_actions: list, backup_start_id: str, current_next_available: str) -> tuple:
+     """
+     Renumber action IDs to avoid conflicts with existing flow actions.
+
+     Args:
+         backup_actions: List of actions from backup.
+         backup_start_id: Start action ID from backup.
+         current_next_available: Next available action ID in target flow.
+
+     Returns:
+         Tuple of (renumbered_actions, new_start_id, new_next_available).
+     """
+     if not backup_actions:
+         return [], None, current_next_available
+
+     start_id = int(current_next_available)
+     old_to_new = {}
+
+     for i, action in enumerate(backup_actions):
+         old_id = action.get("actionId")
+         new_id = str(start_id + i)
+         old_to_new[old_id] = new_id
+
+     def remap_id(old_id):
+         return old_to_new.get(old_id, old_id)
+
+     def remap_action(action):
+         new_action = dict(action)
+         new_action["actionId"] = remap_id(action["actionId"])
+
+         if "connection" in new_action:
+             conn = dict(new_action["connection"])
+             if "nextActionId" in conn:
+                 conn["nextActionId"] = remap_id(conn["nextActionId"])
+             new_action["connection"] = conn
+
+         if "staticBranches" in new_action and new_action["staticBranches"]:
+             new_branches = []
+             for branch in new_action["staticBranches"]:
+                 new_branch = dict(branch)
+                 if "nextActionId" in new_branch:
+                     new_branch["nextActionId"] = remap_id(new_branch["nextActionId"])
+                 if "connection" in new_branch and new_branch["connection"]:
+                     conn = dict(new_branch["connection"])
+                     if "nextActionId" in conn:
+                         conn["nextActionId"] = remap_id(conn["nextActionId"])
+                     new_branch["connection"] = conn
+                 new_branches.append(new_branch)
+             new_action["staticBranches"] = new_branches
+
+         if "defaultBranch" in new_action:
+             db = dict(new_action["defaultBranch"])
+             if "nextActionId" in db:
+                 db["nextActionId"] = remap_id(db["nextActionId"])
+             new_action["defaultBranch"] = db
+
+         if "acceptActions" in new_action:
+             new_action["acceptActions"] = [remap_id(a) for a in new_action["acceptActions"]]
+         if "rejectActions" in new_action:
+             new_action["rejectActions"] = [remap_id(a) for a in new_action["rejectActions"]]
+
+         if "listBranches" in new_action and new_action["listBranches"]:
+             new_list_branches = []
+             for lb in new_action["listBranches"]:
+                 new_lb = dict(lb)
+                 if "connection" in new_lb and new_lb["connection"]:
+                     conn = dict(new_lb["connection"])
+                     if "nextActionId" in conn:
+                         conn["nextActionId"] = remap_id(conn["nextActionId"])
+                     new_lb["connection"] = conn
+                 new_list_branches.append(new_lb)
+             new_action["listBranches"] = new_list_branches
+
+         return new_action
+
+     renumbered = [remap_action(a) for a in backup_actions]
+     new_start_id = remap_id(backup_start_id) if backup_start_id else None
+     new_next_available = str(start_id + len(backup_actions))
+
+     def remap_action_output_refs(obj):
+         if isinstance(obj, str):
+             def replace_ref(match):
+                 prefix = match.group(1)
+                 old_id = match.group(2)
+                 new_id = old_to_new.get(old_id, old_id)
+                 return f"{prefix}{new_id}"
+             return re.sub(r'(action_outputs?\.action_output_?)(\d+)', replace_ref, obj)
+         elif isinstance(obj, dict):
+             return {k: remap_action_output_refs(v) for k, v in obj.items()}
+         elif isinstance(obj, list):
+             return [remap_action_output_refs(item) for item in obj]
+         else:
+             return obj
+
+     renumbered = remap_action_output_refs(renumbered)
+
+     return renumbered, new_start_id, new_next_available
+
+
+ def restore_flow(
+     backup: Union[str, Path, dict],
+     flow_id: Optional[str] = None,
+     name: Optional[str] = None,
+     token: Optional[str] = None,
+     client: Optional[HubSpotClient] = None,
+     dry_run: bool = False,
+ ) -> Optional[dict]:
+     """
+     Restore a HubSpot flow from a backup.
+
+     Args:
+         backup: Path to backup JSON file, or backup dict.
+         flow_id: Target flow ID. Defaults to ID in backup.
+         name: Override flow name. Defaults to name in backup.
+         token: HubSpot token. Falls back to HUBSPOT_AUTOMATION_TOKEN env var.
+         client: Pre-configured HubSpotClient instance.
+         dry_run: If True, return payload without making API call.
+
+     Returns:
+         Updated flow dict, or payload dict if dry_run=True.
+     """
+     if client is None:
+         client = HubSpotClient(token=token)
+
+     if isinstance(backup, (str, Path)):
+         backup_path = Path(backup)
+         if not backup_path.is_file():
+             raise FileNotFoundError(f"Backup file not found: {backup_path}")
+         with backup_path.open("r", encoding="utf-8") as f:
+             backup = json.load(f)
+
+     target_flow_id = flow_id or backup.get("id")
+     if not target_flow_id:
+         raise ValueError("flow_id not provided and not present in backup.")
+     target_flow_id = str(target_flow_id)
+
+     current = client.get_flow(target_flow_id)
+
+     current_revision = current.get("revisionId")
+     current_type = current.get("type")
+     current_name = current.get("name")
+     current_next_available = current.get("nextAvailableActionId", "1")
+
+     backup_actions = backup.get("actions", [])
+     backup_start_id = backup.get("startActionId")
+
+     renumbered_actions, new_start_id, new_next_available = renumber_actions(
+         backup_actions, backup_start_id, current_next_available
+     )
+
+     ds_mapping = build_datasource_mapping(
+         backup.get("dataSources", []),
+         current.get("dataSources", [])
+     )
+     if ds_mapping:
+         renumbered_actions = remap_fetched_objects(renumbered_actions, ds_mapping)
+
+     body = {
+         "type": current_type,
+         "revisionId": current_revision,
+         "objectTypeId": current.get("objectTypeId"),
+         "flowType": current.get("flowType"),
+         "actions": renumbered_actions,
+         "startActionId": new_start_id,
+         "nextAvailableActionId": new_next_available,
+     }
+
+     if name:
+         body["name"] = name
+     elif "name" in backup:
+         body["name"] = backup["name"]
+     else:
+         body["name"] = current_name
+
+     body["isEnabled"] = False
+
+     keys_to_copy = [
+         "description",
+         "customProperties",
+         "enrollmentCriteria",
+         "blockedDates",
+         "timeWindows",
+     ]
+     for key in keys_to_copy:
+         if key in backup:
+             body[key] = backup[key]
+
+     if dry_run:
+         return body
+
+     return client.update_flow(target_flow_id, body)
+
+
+ def main() -> None:
+     """CLI entry point for workflows-restore command."""
+     parser = argparse.ArgumentParser(
+         description="Restore a HubSpot automation flow from a backup JSON file."
+     )
+     parser.add_argument("backup_path", help="Path to the backup JSON file")
+     parser.add_argument("--flow-id", dest="flow_id", help="Override target flowId")
+     parser.add_argument("--name", dest="name", help="Override flow name")
+     parser.add_argument("--dry", action="store_true", help="Show payload without sending")
+
+     args = parser.parse_args()
+
+     backup_file = Path(args.backup_path)
+     if not backup_file.is_file():
+         print(f"Backup file not found: {backup_file}", file=sys.stderr)
+         sys.exit(1)
+
+     try:
+         client = HubSpotClient()
+     except ValueError as e:
+         print(f"Error: {e}", file=sys.stderr)
+         sys.exit(1)
+
+     with backup_file.open("r", encoding="utf-8") as f:
+         backup = json.load(f)
+
+     flow_id = args.flow_id or backup.get("id")
+     if not flow_id:
+         print("flowId not provided and not present in backup JSON.", file=sys.stderr)
+         sys.exit(1)
+
+     print(f"Using flowId: {flow_id}")
+     print(f"Backup file: {backup_file}")
+
+     current = client.get_flow(str(flow_id))
+     print("\nCurrent flow summary:")
+     print(f" name: {current.get('name')}")
+     print(f" revisionId: {current.get('revisionId')}")
+     print(f" type: {current.get('type')}")
+
+     print("\nBackup summary:")
+     print(f" name: {backup.get('name')}")
+     print(f" actions: {len(backup.get('actions', []))}")
+
+     if args.dry:
+         result = restore_flow(backup, flow_id=args.flow_id, name=args.name, client=client, dry_run=True)
+         print("\n[DRY RUN] Would send PUT /automation/v4/flows/{flowId} with body:")
+         print(json.dumps(result, indent=2))
+         return
+
+     print("\nSending PUT to update flow...")
+     updated = restore_flow(backup, flow_id=args.flow_id, name=args.name, client=client)
+
+     print("\nUpdate complete. New flow summary:")
+     print(f" id: {updated.get('id')}")
+     print(f" name: {updated.get('name')}")
+     print(f" revisionId: {updated.get('revisionId')}")
+     print(f" isEnabled: {updated.get('isEnabled')}")
+
+
+ if __name__ == "__main__":
+     main()
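
Because `restore_flow()` sends its payload with no confirmation step, the `dry_run` flag above is the safest way to inspect exactly what would be PUT to the target flow; note that it still GETs the current flow in order to build that payload. A short sketch, with a placeholder backup path and flow ID:

```python
# Illustrative sketch only: build the restore payload without sending the PUT.
# The backup path and flow ID below are placeholders.
import json

from ft_hubspot_workflow_backup import restore_flow

payload = restore_flow(
    "snapshots/my-workflow.json",  # placeholder backup file from a previous run
    flow_id="123456789",           # placeholder target flow; defaults to the ID in the backup
    dry_run=True,                  # return the PUT body instead of sending it
)
print(json.dumps(payload, indent=2))
# A real restore always sets isEnabled to False, so the workflow stays disabled
# until you re-enable it in HubSpot after reviewing the result.
```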
@@ -0,0 +1,161 @@
+ Metadata-Version: 2.4
+ Name: ft-hubspot-workflow-backup
+ Version: 0.1.4
+ Summary: Backup and restore HubSpot Automation (v4) workflows
+ Project-URL: Homepage, https://github.com/nflore/ft-hubspot-workflow-backup
+ Project-URL: Repository, https://github.com/nflore/ft-hubspot-workflow-backup
+ Project-URL: Issues, https://github.com/nflore/ft-hubspot-workflow-backup/issues
+ Project-URL: Changelog, https://github.com/nflore/ft-hubspot-workflow-backup/releases
+ Author: Nathan Flore
+ License-Expression: MIT
+ License-File: LICENSE
+ Requires-Python: >=3.9
+ Requires-Dist: requests>=2.25.0
+ Description-Content-Type: text/markdown
+
+ # HubSpot Workflow Backup & Restore
+
+ [![PyPI](https://img.shields.io/pypi/v/ft-hubspot-workflow-backup)](https://pypi.org/project/ft-hubspot-workflow-backup/)
+
+ Backup and restore HubSpot Automation (v4) workflows. There is NO confirmation when you request a restore; it immediately overwrites the target workflow.
+
+ ## Installation
+
+ ```bash
+ # From PyPI
+ pip install ft-hubspot-workflow-backup
+
+ # Or with uv
+ uv pip install ft-hubspot-workflow-backup
+
+ # Or add to pyproject.toml
+ # dependencies = ["ft-hubspot-workflow-backup>=0.1.0"]
+ ```
+
+ ## Requirements
+
+ - Python 3.9+ (recommended with [`uv`](https://github.com/astral-sh/uv))
+ - HubSpot private app token with `automation` read/write scope
+ - Environment variable `HUBSPOT_AUTOMATION_TOKEN` set to your token
+
+ ## Usage
+
+ ### Backup all workflows
+
+ ```bash
+ uv run workflows-backup
+ ```
+
+ Creates `snapshots/` with:
+ - One JSON file per workflow: `<slugified-name>.json`
+ - An `_index.json` listing all backed up flows with SHA-256 hashes for verification
+
+ Options:
+ - `-o, --output-dir <path>`: Custom output directory (default: `./snapshots/`)
+ - `--use-date-dir`: Create a timestamped subdirectory (e.g., `snapshots/2026_01_20_123456/`)
+ - `--use-date-prefix`: Prefix filenames with timestamp (e.g., `2026_01_20_123456_workflow-name.json`)
+ - `--verify`: Verify backup integrity immediately after completion
+
+ Example with date organization:
+ ```bash
+ uv run workflows-backup --use-date-dir --use-date-prefix
+ ```
+
+ ### Restore a workflow
+
+ ```bash
+ uv run workflows-restore <backup-file> [--flow-id <id>] [--name "<name>"] [--dry]
+ ```
+
+ Options:
+ - `--flow-id`: Target flow ID (defaults to ID in backup)
+ - `--name`: Override flow name
+ - `--dry`: Preview payload without sending
+
+ Example:
+ ```bash
+ uv run workflows-restore snapshots/<workflow-name>.json --dry
+ ```
+
+ ### As a Python module
+
+ ```python
+ from ft_hubspot_workflow_backup import backup_all_flows, restore_flow, verify_backups, HubSpotClient
+
+ # Backup (uses HUBSPOT_AUTOMATION_TOKEN env var)
+ snapshot_dir = backup_all_flows()
+
+ # With date-based organization
+ snapshot_dir = backup_all_flows(use_date_dir=True, use_date_prefix=True)
+
+ # Or with explicit token and custom output
+ client = HubSpotClient(token="your-token")
+ snapshot_dir = backup_all_flows(client=client, output_dir="./my-snapshots")
+
+ # Verify backups
+ results = verify_backups("./my-snapshots")
+ print(f"Verified: {len(results['verified'])}, Failed: {len(results['failed'])}")
+
+ # Restore
+ restore_flow("path/to/backup.json", flow_id="123456")
+ ```
+
+ ## Cryptographic Verification
+
+ Each backup includes SHA-256 hashes in `_index.json` for verifying workflow file integrity. This lets you detect whether a workflow file has been modified since it was backed up.
+
+ ### Verify a single workflow
+
+ ```bash
+ # Compare the stored hash with the actual file hash
+ shasum -a 256 snapshots/<workflow-name>.json
+ # Then compare with the hash in _index.json
+ ```
+
+ ### Verify all workflows (bash)
+
+ ```bash
+ cd snapshots
+ for file in *.json; do
+   [[ "$file" == "_index.json" ]] && continue
+   expected=$(jq -r ".flows[] | select(.filename == \"$file\") | .hash" _index.json)
+   actual=$(shasum -a 256 "$file" | cut -d' ' -f1)
+   if [[ "$expected" == "$actual" ]]; then
+     echo "✓ $file"
+   else
+     echo "✗ $file (hash mismatch)"
+   fi
+ done
+ ```
+
+ ### Verify programmatically (Python)
+
+ ```python
+ from ft_hubspot_workflow_backup import verify_backups
+
+ results = verify_backups("snapshots")
+ print(f"Verified: {len(results['verified'])}, Failed: {len(results['failed'])}")
+
+ if results["failed"]:
+     print("Hash mismatches:", results["failed"])
+ if results["missing"]:
+     print("Missing files:", results["missing"])
+ ```
+
+ ## Notes
+
+ - Secrets (`secretNames`) are not backed up; only their names are referenced.
+ - Flows reference HubSpot assets (pipelines, stages, templates) by ID. Restores assume those IDs are still valid.
+ - Restored flows are always set to DISABLED. Enable manually after verifying.
+
+ ## License
+
+ MIT License
+
+ Copyright (c) 2026 Nathan Flore / Flore Technologies, LLC.
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
@@ -0,0 +1,9 @@
+ ft_hubspot_workflow_backup/__init__.py,sha256=G_Je8pEEMa7b2UWkWYhuM3Jp3zrDeH86-FbGs-UdzpE,306
+ ft_hubspot_workflow_backup/backup.py,sha256=h17iDp-ehV28sb-g22cm0GC-wonQxwbFaS1nITcmAeE,9113
+ ft_hubspot_workflow_backup/client.py,sha256=bXTd3wy6UW_d9h_WF_SF4QpemeBSvOXJBiJH9qKlS4o,2472
+ ft_hubspot_workflow_backup/restore.py,sha256=MAFXe3L9tUPrZyNweWItIiPFWL4uhNtAPLteh4REq8A,11246
+ ft_hubspot_workflow_backup-0.1.4.dist-info/METADATA,sha256=3Es2nEMZer5nqvhMu4Jeo25WLXwzOgjF2lOsBf-YkDw,5629
+ ft_hubspot_workflow_backup-0.1.4.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+ ft_hubspot_workflow_backup-0.1.4.dist-info/entry_points.txt,sha256=vn3sTFGna_GR_ue_NvSIm1wHZEqnPkSNFIGNm-xvYRY,136
+ ft_hubspot_workflow_backup-0.1.4.dist-info/licenses/LICENSE,sha256=rI0DW87f-vx36nZsWmDypJXzsisR5242gMfDCEtJxiU,1096
+ ft_hubspot_workflow_backup-0.1.4.dist-info/RECORD,,
@@ -0,0 +1,4 @@
+ Wheel-Version: 1.0
+ Generator: hatchling 1.28.0
+ Root-Is-Purelib: true
+ Tag: py3-none-any
@@ -0,0 +1,3 @@
+ [console_scripts]
+ workflows-backup = ft_hubspot_workflow_backup.backup:main
+ workflows-restore = ft_hubspot_workflow_backup.restore:main
@@ -0,0 +1,9 @@
+ MIT License
+
+ Copyright (c) 2026 Nathan Flore / Flore Technologies, LLC.
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.