amplify-excel-migrator 1.1.5-py3-none-any.whl → 1.2.15-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. amplify_excel_migrator/__init__.py +17 -0
  2. amplify_excel_migrator/auth/__init__.py +6 -0
  3. amplify_excel_migrator/auth/cognito_auth.py +306 -0
  4. amplify_excel_migrator/auth/provider.py +42 -0
  5. amplify_excel_migrator/cli/__init__.py +5 -0
  6. amplify_excel_migrator/cli/commands.py +165 -0
  7. amplify_excel_migrator/client.py +47 -0
  8. amplify_excel_migrator/core/__init__.py +5 -0
  9. amplify_excel_migrator/core/config.py +98 -0
  10. amplify_excel_migrator/data/__init__.py +7 -0
  11. amplify_excel_migrator/data/excel_reader.py +23 -0
  12. amplify_excel_migrator/data/transformer.py +119 -0
  13. amplify_excel_migrator/data/validator.py +48 -0
  14. amplify_excel_migrator/graphql/__init__.py +8 -0
  15. amplify_excel_migrator/graphql/client.py +137 -0
  16. amplify_excel_migrator/graphql/executor.py +405 -0
  17. amplify_excel_migrator/graphql/mutation_builder.py +80 -0
  18. amplify_excel_migrator/graphql/query_builder.py +194 -0
  19. amplify_excel_migrator/migration/__init__.py +8 -0
  20. amplify_excel_migrator/migration/batch_uploader.py +23 -0
  21. amplify_excel_migrator/migration/failure_tracker.py +92 -0
  22. amplify_excel_migrator/migration/orchestrator.py +143 -0
  23. amplify_excel_migrator/migration/progress_reporter.py +57 -0
  24. amplify_excel_migrator/schema/__init__.py +6 -0
  25. model_field_parser.py → amplify_excel_migrator/schema/field_parser.py +100 -22
  26. amplify_excel_migrator/schema/introspector.py +95 -0
  27. {amplify_excel_migrator-1.1.5.dist-info → amplify_excel_migrator-1.2.15.dist-info}/METADATA +121 -26
  28. amplify_excel_migrator-1.2.15.dist-info/RECORD +40 -0
  29. amplify_excel_migrator-1.2.15.dist-info/entry_points.txt +2 -0
  30. amplify_excel_migrator-1.2.15.dist-info/top_level.txt +2 -0
  31. tests/__init__.py +1 -0
  32. tests/test_cli_commands.py +292 -0
  33. tests/test_client.py +187 -0
  34. tests/test_cognito_auth.py +363 -0
  35. tests/test_config_manager.py +347 -0
  36. tests/test_field_parser.py +615 -0
  37. tests/test_mutation_builder.py +391 -0
  38. tests/test_query_builder.py +384 -0
  39. amplify_client.py +0 -941
  40. amplify_excel_migrator-1.1.5.dist-info/RECORD +0 -9
  41. amplify_excel_migrator-1.1.5.dist-info/entry_points.txt +0 -2
  42. amplify_excel_migrator-1.1.5.dist-info/top_level.txt +0 -3
  43. migrator.py +0 -437
  44. {amplify_excel_migrator-1.1.5.dist-info → amplify_excel_migrator-1.2.15.dist-info}/WHEEL +0 -0
  45. {amplify_excel_migrator-1.1.5.dist-info → amplify_excel_migrator-1.2.15.dist-info}/licenses/LICENSE +0 -0
amplify_excel_migrator/graphql/executor.py (new file)
@@ -0,0 +1,405 @@
+ """Query executor for high-level GraphQL operations."""
+
+ import asyncio
+ import logging
+ from typing import Dict, Any, Optional, List
+
+ import aiohttp
+
+ from .client import GraphQLClient
+ from .query_builder import QueryBuilder
+ from .mutation_builder import MutationBuilder
+ from amplify_excel_migrator.schema import SchemaIntrospector
+
+ logger = logging.getLogger(__name__)
+
+
+ class QueryExecutor:
+     def __init__(self, client: GraphQLClient, batch_size: int = 20):
+         self.client = client
+         self.batch_size = batch_size
+         self.records_cache: Dict[str, List[Dict]] = {}
+         self.schema = SchemaIntrospector(client)
+
+     def get_model_structure(self, model_type: str) -> Dict[str, Any]:
+         return self.schema.get_model_structure(model_type)
+
+     def get_primary_field_name(self, model_name: str, parsed_model_structure: Dict[str, Any]) -> tuple[str, bool, str]:
+         return self.schema.get_primary_field_name(model_name, parsed_model_structure)
+
+     def _get_list_query_name(self, model_name: str) -> Optional[str]:
+         return self.schema.get_list_query_name(model_name)
+
+     def list_records_by_secondary_index(
+         self,
+         model_name: str,
+         secondary_index: str,
+         value: Optional[str] = None,
+         fields: Optional[List[str]] = None,
+         field_type: str = "String",
+     ) -> Optional[List[Dict]]:
+         if fields is None:
+             fields = ["id", secondary_index]
+
+         all_items = []
+         next_token = None
+
+         if not value:
+             query = QueryBuilder.build_list_query(model_name, fields=fields)
+             query_name = self._get_list_query_name(model_name)
+
+             while True:
+                 variables = QueryBuilder.build_variables_for_list(next_token=next_token)
+                 result = self.client.request(query, variables)
+
+                 if result and "data" in result:
+                     data = result["data"].get(query_name, {})
+                     items = data.get("items", [])
+                     all_items.extend(items)
+                     next_token = data.get("nextToken")
+
+                     if not next_token:
+                         break
+                 else:
+                     break
+         else:
+             query = QueryBuilder.build_secondary_index_query(
+                 model_name, secondary_index, fields=fields, field_type=field_type
+             )
+             query_name = f"list{model_name}By{secondary_index[0].upper() + secondary_index[1:]}"
+
+             while True:
+                 variables = {secondary_index: value, "limit": 1000, "nextToken": next_token}
+                 result = self.client.request(query, variables)
+
+                 if result and "data" in result:
+                     data = result["data"].get(query_name, {})
+                     items = data.get("items", [])
+                     all_items.extend(items)
+                     next_token = data.get("nextToken")
+
+                     if not next_token:
+                         break
+                 else:
+                     break
+
+         return all_items if all_items else None
+
+     def list_records_by_field(
+         self, model_name: str, field_name: str, value: Optional[str] = None, fields: Optional[List[str]] = None
+     ) -> Optional[List[Dict]]:
+         if fields is None:
+             fields = ["id", field_name]
+
+         all_items = []
+         next_token = None
+         query_name = self._get_list_query_name(model_name)
+
+         if not value:
+             query = QueryBuilder.build_list_query(model_name, fields=fields)
+
+             while True:
+                 variables = QueryBuilder.build_variables_for_list(next_token=next_token)
+                 result = self.client.request(query, variables)
+
+                 if result and "data" in result:
+                     data = result["data"].get(query_name, {})
+                     items = data.get("items", [])
+                     all_items.extend(items)
+                     next_token = data.get("nextToken")
+
+                     if not next_token:
+                         break
+                 else:
+                     break
+         else:
+             query = QueryBuilder.build_list_query_with_filter(model_name, fields=fields)
+             filter_input = QueryBuilder.build_filter_equals(field_name, value)
+
+             while True:
+                 variables = {"filter": filter_input, "limit": 1000, "nextToken": next_token}
+                 result = self.client.request(query, variables)
+
+                 if result and "data" in result:
+                     data = result["data"].get(query_name, {})
+                     items = data.get("items", [])
+                     all_items.extend(items)
+                     next_token = data.get("nextToken")
+
+                     if not next_token:
+                         break
+                 else:
+                     break
+
+         return all_items if all_items else None
+
+     def get_record_by_id(self, model_name: str, record_id: str, fields: Optional[List[str]] = None) -> Optional[Dict]:
+         if fields is None:
+             fields = ["id"]
+
+         query = QueryBuilder.build_get_by_id_query(model_name, fields=fields)
+         query_name = f"get{model_name}"
+
+         result = self.client.request(query, {"id": record_id})
+
+         if result and "data" in result:
+             return result["data"].get(query_name)
+
+         return None
+
+     def get_records(
+         self,
+         model_name: str,
+         primary_field: Optional[str] = None,
+         is_secondary_index: Optional[bool] = None,
+         fields: Optional[List[str]] = None,
+     ) -> Optional[List[Dict]]:
+         if model_name in self.records_cache:
+             return self.records_cache[model_name]
+
+         if not primary_field:
+             return None
+
+         if is_secondary_index:
+             records = self.list_records_by_secondary_index(model_name, primary_field, fields=fields)
+         else:
+             records = self.list_records_by_field(model_name, primary_field, fields=fields)
+
+         if records:
+             self.records_cache[model_name] = records
+             logger.debug(f"💾 Cached {len(records)} records for {model_name}")
+
+         return records
+
+     def get_record(
+         self,
+         model_name: str,
+         parsed_model_structure: Optional[Dict[str, Any]] = None,
+         value: Optional[str] = None,
+         record_id: Optional[str] = None,
+         primary_field: Optional[str] = None,
+         is_secondary_index: Optional[bool] = None,
+         fields: Optional[List[str]] = None,
+     ) -> Optional[Dict]:
+         if record_id:
+             return self.get_record_by_id(model_name, record_id)
+
+         if not primary_field:
+             if not parsed_model_structure:
+                 logger.error("Parsed model structure required if primary_field not provided")
+                 return None
+             primary_field, is_secondary_index, _ = self.get_primary_field_name(model_name, parsed_model_structure)
+
+         records = self.get_records(model_name, primary_field, is_secondary_index, fields)
+         if not records:
+             return None
+
+         return next((record for record in records if record.get(primary_field) == value), None)
+
+     async def create_record_async(
+         self, session: aiohttp.ClientSession, data: Dict, model_name: str, primary_field: str
+     ) -> Optional[Dict]:
+         mutation = MutationBuilder.build_create_mutation(model_name, return_fields=["id", primary_field])
+         variables = MutationBuilder.build_create_variables(data)
+
+         context = f"{model_name}: {primary_field}={data.get(primary_field)}"
+         result = await self.client.request_async(session, mutation, variables, context)
+
+         if result and "data" in result:
+             created = result["data"].get(f"create{model_name}")
+             if created:
+                 logger.info(f'Created {model_name} with {primary_field}="{data[primary_field]}" (ID: {created["id"]})')
+                 return created
+             else:
+                 logger.error(f'Failed to create {model_name} with {primary_field}="{data[primary_field]}"')
+
+         return None
+
+     async def check_record_exists_async(
+         self,
+         session: aiohttp.ClientSession,
+         model_name: str,
+         primary_field: str,
+         value: str,
+         is_secondary_index: bool,
+         record: Dict,
+         field_type: str = "String",
+     ) -> Optional[Dict]:
+         context = f"{model_name}: {primary_field}={value}"
+
+         if is_secondary_index:
+             query = QueryBuilder.build_secondary_index_query(
+                 model_name, primary_field, fields=["id"], field_type=field_type, with_pagination=False
+             )
+             variables = {primary_field: value}
+             query_name = f"list{model_name}By{primary_field[0].upper() + primary_field[1:]}"
+
+             result = await self.client.request_async(session, query, variables, context)
+             if result and "data" in result:
+                 items = result["data"].get(query_name, {}).get("items", [])
+                 if len(items) > 0:
+                     logger.warning(f'Record with {primary_field}="{value}" already exists in {model_name}')
+                     return None
+         else:
+             query_name = self._get_list_query_name(model_name)
+             query = QueryBuilder.build_list_query_with_filter(model_name, fields=["id"], with_pagination=False)
+             filter_input = QueryBuilder.build_filter_equals(primary_field, value)
+             variables = {"filter": filter_input}
+
+             result = await self.client.request_async(session, query, variables, context)
+             if result and "data" in result:
+                 items = result["data"].get(query_name, {}).get("items", [])
+                 if len(items) > 0:
+                     logger.error(f'Record with {primary_field}="{value}" already exists in {model_name}')
+                     return None
+
+         return record
+
+     async def upload_batch_async(
+         self,
+         batch: List[Dict],
+         model_name: str,
+         primary_field: str,
+         is_secondary_index: bool,
+         field_type: str = "String",
+     ) -> tuple[int, int, List[Dict]]:
+         async with aiohttp.ClientSession() as session:
+             duplicate_checks = [
+                 self.check_record_exists_async(
+                     session, model_name, primary_field, record[primary_field], is_secondary_index, record, field_type
+                 )
+                 for record in batch
+             ]
+             check_results = await asyncio.gather(*duplicate_checks, return_exceptions=True)
+
+             filtered_batch = []
+             failed_records = []
+
+             for i, result in enumerate(check_results):
+                 if isinstance(result, Exception):
+                     error_msg = str(result)
+                     failed_records.append(
+                         {
+                             "primary_field": primary_field,
+                             "primary_field_value": batch[i].get(primary_field, "Unknown"),
+                             "error": f"Duplicate check error: {error_msg}",
+                         }
+                     )
+                     logger.error(f"Error checking duplicate: {result}")
+                 elif result is not None:
+                     filtered_batch.append(result)
+
+             if not filtered_batch:
+                 return 0, len(batch), failed_records
+
+             create_tasks = [
+                 self.create_record_async(session, record, model_name, primary_field) for record in filtered_batch
+             ]
+             results = await asyncio.gather(*create_tasks, return_exceptions=True)
+
+             for i, result in enumerate(results):
+                 if isinstance(result, Exception):
+                     error_msg = str(result)
+                     failed_records.append(
+                         {
+                             "primary_field": primary_field,
+                             "primary_field_value": filtered_batch[i].get(primary_field, "Unknown"),
+                             "error": error_msg,
+                         }
+                     )
+                 elif not result:
+                     failed_records.append(
+                         {
+                             "primary_field": primary_field,
+                             "primary_field_value": filtered_batch[i].get(primary_field, "Unknown"),
+                             "error": "Creation failed - no response",
+                         }
+                     )
+
+             success_count = sum(1 for r in results if r and not isinstance(r, Exception))
+             error_count = len(batch) - success_count
+
+             return success_count, error_count, failed_records
+
+     def upload(
+         self, records: List[Dict], model_name: str, parsed_model_structure: Dict[str, Any]
+     ) -> tuple[int, int, List[Dict]]:
+         logger.info("Uploading to Amplify backend...")
+
+         success_count = 0
+         error_count = 0
+         all_failed_records = []
+         num_of_batches = (len(records) + self.batch_size - 1) // self.batch_size
+
+         primary_field, is_secondary_index, field_type = self.get_primary_field_name(model_name, parsed_model_structure)
+         if not primary_field:
+             logger.error(f"Aborting upload for model {model_name}")
+             return 0, len(records), []
+
+         for i in range(0, len(records), self.batch_size):
+             batch = records[i : i + self.batch_size]
+             logger.info(f"Uploading batch {i // self.batch_size + 1} / {num_of_batches} ({len(batch)} items)...")
+
+             batch_success, batch_error, batch_failed_records = asyncio.run(
+                 self.upload_batch_async(batch, model_name, primary_field, is_secondary_index, field_type)
+             )
+             success_count += batch_success
+             error_count += batch_error
+             all_failed_records.extend(batch_failed_records)
+
+             logger.info(
+                 f"Processed batch {i // self.batch_size + 1} of model {model_name}: {success_count} success, {error_count} errors"
+             )
+
+         return success_count, error_count, all_failed_records
+
+     def build_foreign_key_lookups(self, df, parsed_model_structure: Dict[str, Any]) -> Dict[str, Dict[str, str]]:
+         """
+         Build a cache of foreign key lookups for all ID fields in the DataFrame.
+
+         This pre-fetches all related records to avoid N+1 query problems during row processing.
+
+         Args:
+             df: pandas DataFrame containing the data to be processed
+             parsed_model_structure: Parsed model structure containing field information
+
+         Returns:
+             Dictionary mapping model names to lookup dictionaries and primary fields
+         """
+         fk_lookup_cache = {}
+
+         for field in parsed_model_structure["fields"]:
+             if not field["is_id"]:
+                 continue
+
+             field_name = field["name"][:-2]
+
+             if field_name not in df.columns:
+                 continue
+
+             if "related_model" in field:
+                 related_model = field["related_model"]
+             else:
+                 related_model = field_name[0].upper() + field_name[1:]
+
+             if related_model in fk_lookup_cache:
+                 continue
+
+             try:
+                 primary_field, is_secondary_index, _ = self.get_primary_field_name(
+                     related_model, parsed_model_structure
+                 )
+                 records = self.get_records(related_model, primary_field, is_secondary_index)
+
+                 if records:
+                     lookup = {
+                         str(record.get(primary_field)): record.get("id")
+                         for record in records
+                         if record.get(primary_field)
+                     }
+                     fk_lookup_cache[related_model] = {"lookup": lookup, "primary_field": primary_field}
+                     logger.debug(f"  📦 Cached {len(lookup)} {related_model} records")
+             except Exception as e:
+                 logger.warning(f"  ⚠️ Could not pre-fetch {related_model}: {e}")
+
+         return fk_lookup_cache
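
Note for reviewers: a minimal usage sketch of the new executor. The GraphQLClient constructor arguments, the model name "Customer", and the record values are assumptions for illustration; the real client signature lives in graphql/client.py, and the model structure comes from the schema introspector.

    from amplify_excel_migrator.graphql.client import GraphQLClient
    from amplify_excel_migrator.graphql.executor import QueryExecutor

    client = GraphQLClient(endpoint, headers)  # assumed constructor; see graphql/client.py
    executor = QueryExecutor(client, batch_size=20)

    # Introspect the model once, then upload. upload() resolves the primary field,
    # slices the records into batches of batch_size, and runs each batch through
    # asyncio.run(): per-record duplicate checks, then create{Model} mutations
    # for the records that survive filtering.
    structure = executor.get_model_structure("Customer")   # "Customer" is hypothetical
    records = [{"name": "Acme"}, {"name": "Globex"}]       # rows already transformed from Excel
    success, errors, failed = executor.upload(records, "Customer", structure)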
amplify_excel_migrator/graphql/mutation_builder.py (new file)
@@ -0,0 +1,80 @@
+ """GraphQL mutation string builder."""
+
+ from typing import Dict, Any, List, Optional, Tuple
+
+
+ class MutationBuilder:
+     """Builds GraphQL mutation strings for Amplify GraphQL API."""
+
+     @staticmethod
+     def build_create_mutation(
+         model_name: str,
+         return_fields: Optional[List[str]] = None,
+     ) -> str:
+         if return_fields is None:
+             return_fields = ["id"]
+
+         fields_str = "\n".join(f" {field}" for field in return_fields)
+
+         mutation = f"""
+         mutation Create{model_name}($input: Create{model_name}Input!) {{
+             create{model_name}(input: $input) {{
+                 {fields_str}
+             }}
+         }}
+         """
+         return mutation.strip()
+
+     @staticmethod
+     def build_update_mutation(
+         model_name: str,
+         return_fields: Optional[List[str]] = None,
+     ) -> str:
+         if return_fields is None:
+             return_fields = ["id"]
+
+         fields_str = "\n".join(f" {field}" for field in return_fields)
+
+         mutation = f"""
+         mutation Update{model_name}($input: Update{model_name}Input!) {{
+             update{model_name}(input: $input) {{
+                 {fields_str}
+             }}
+         }}
+         """
+         return mutation.strip()
+
+     @staticmethod
+     def build_delete_mutation(
+         model_name: str,
+         return_fields: Optional[List[str]] = None,
+     ) -> str:
+         if return_fields is None:
+             return_fields = ["id"]
+
+         fields_str = "\n".join(f" {field}" for field in return_fields)
+
+         mutation = f"""
+         mutation Delete{model_name}($input: Delete{model_name}Input!) {{
+             delete{model_name}(input: $input) {{
+                 {fields_str}
+             }}
+         }}
+         """
+         return mutation.strip()
+
+     @staticmethod
+     def build_create_variables(input_data: Dict[str, Any]) -> Dict[str, Any]:
+         return {"input": input_data}
+
+     @staticmethod
+     def build_update_variables(
+         record_id: str,
+         updates: Dict[str, Any],
+     ) -> Dict[str, Any]:
+         input_data = {"id": record_id, **updates}
+         return {"input": input_data}
+
+     @staticmethod
+     def build_delete_variables(record_id: str) -> Dict[str, Any]:
+         return {"input": {"id": record_id}}
amplify_excel_migrator/graphql/query_builder.py (new file)
@@ -0,0 +1,194 @@
+ """GraphQL query string builder."""
+
+ from typing import List, Optional, Any, Dict
+
+
+ class QueryBuilder:
+     """Builds GraphQL query strings for Amplify GraphQL API."""
+
+     @staticmethod
+     def build_list_query(
+         model_name: str,
+         fields: Optional[List[str]] = None,
+         limit: int = 1000,
+         with_pagination: bool = True,
+     ) -> str:
+         if fields is None:
+             fields = ["id"]
+
+         fields_str = "\n".join(f" {field}" for field in fields)
+         query_name = f"list{model_name}s"
+
+         if with_pagination:
+             query = f"""
+             query List{model_name}s($limit: Int, $nextToken: String) {{
+                 {query_name}(limit: $limit, nextToken: $nextToken) {{
+                     items {{
+                         {fields_str}
+                     }}
+                     nextToken
+                 }}
+             }}
+             """
+         else:
+             query = f"""
+             query List{model_name}s($limit: Int) {{
+                 {query_name}(limit: $limit) {{
+                     items {{
+                         {fields_str}
+                     }}
+                 }}
+             }}
+             """
+         return query.strip()
+
+     @staticmethod
+     def build_list_query_with_filter(
+         model_name: str,
+         fields: Optional[List[str]] = None,
+         limit: int = 1000,
+         with_pagination: bool = True,
+     ) -> str:
+         if fields is None:
+             fields = ["id"]
+
+         fields_str = "\n".join(f" {field}" for field in fields)
+         query_name = f"list{model_name}s"
+
+         if with_pagination:
+             query = f"""
+             query List{model_name}s($filter: Model{model_name}FilterInput, $limit: Int, $nextToken: String) {{
+                 {query_name}(filter: $filter, limit: $limit, nextToken: $nextToken) {{
+                     items {{
+                         {fields_str}
+                     }}
+                     nextToken
+                 }}
+             }}
+             """
+         else:
+             query = f"""
+             query List{model_name}s($filter: Model{model_name}FilterInput, $limit: Int) {{
+                 {query_name}(filter: $filter, limit: $limit) {{
+                     items {{
+                         {fields_str}
+                     }}
+                 }}
+             }}
+             """
+         return query.strip()
+
+     @staticmethod
+     def build_secondary_index_query(
+         model_name: str,
+         index_field: str,
+         fields: Optional[List[str]] = None,
+         field_type: str = "String",
+         with_pagination: bool = True,
+     ) -> str:
+         if fields is None:
+             fields = ["id", index_field]
+
+         fields_str = "\n".join(f" {field}" for field in fields)
+         query_name = f"list{model_name}By{index_field[0].upper() + index_field[1:]}"
+
+         if with_pagination:
+             query = f"""
+             query {query_name}(${index_field}: {field_type}!, $limit: Int, $nextToken: String) {{
+                 {query_name}({index_field}: ${index_field}, limit: $limit, nextToken: $nextToken) {{
+                     items {{
+                         {fields_str}
+                     }}
+                     nextToken
+                 }}
+             }}
+             """
+         else:
+             query = f"""
+             query {query_name}(${index_field}: {field_type}!) {{
+                 {query_name}({index_field}: ${index_field}) {{
+                     items {{
+                         {fields_str}
+                     }}
+                 }}
+             }}
+             """
+         return query.strip()
+
+     @staticmethod
+     def build_get_by_id_query(
+         model_name: str,
+         fields: Optional[List[str]] = None,
+     ) -> str:
+         if fields is None:
+             fields = ["id"]
+
+         fields_str = "\n".join(f" {field}" for field in fields)
+
+         query = f"""
+         query Get{model_name}($id: ID!) {{
+             get{model_name}(id: $id) {{
+                 {fields_str}
+             }}
+         }}
+         """
+         return query.strip()
+
+     @staticmethod
+     def build_introspection_query(model_name: str) -> str:
+         query = f"""
+         query IntrospectModel {{
+             __type(name: "{model_name}") {{
+                 name
+                 fields {{
+                     name
+                     type {{
+                         name
+                         kind
+                         ofType {{
+                             name
+                             kind
+                         }}
+                     }}
+                 }}
+             }}
+         }}
+         """
+         return query.strip()
+
+     @staticmethod
+     def build_variables_for_list(
+         limit: int = 1000,
+         next_token: Optional[str] = None,
+     ) -> Dict[str, Any]:
+         variables = {"limit": limit}
+         if next_token:
+             variables["nextToken"] = next_token
+         return variables
+
+     @staticmethod
+     def build_variables_for_filter(
+         filter_dict: Dict[str, Any],
+         limit: int = 1000,
+         next_token: Optional[str] = None,
+     ) -> Dict[str, Any]:
+         variables = {"filter": filter_dict, "limit": limit}
+         if next_token:
+             variables["nextToken"] = next_token
+         return variables
+
+     @staticmethod
+     def build_variables_for_secondary_index(
+         index_field: str,
+         value: Any,
+         limit: int = 1000,
+         next_token: Optional[str] = None,
+     ) -> Dict[str, Any]:
+         variables = {index_field: value, "limit": limit}
+         if next_token:
+             variables["nextToken"] = next_token
+         return variables
+
+     @staticmethod
+     def build_filter_equals(field: str, value: Any) -> Dict[str, Any]:
+         return {field: {"eq": value}}
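
Note for reviewers: the query builders follow Amplify's generated naming conventions (listXs, listXByField, getX). A hypothetical Customer model with an email secondary index:

    query = QueryBuilder.build_secondary_index_query("Customer", "email")
    # Produces a listCustomerByEmail query taking $email: String! plus $limit and $nextToken.

    QueryBuilder.build_variables_for_secondary_index("email", "a@example.com")
    # {"email": "a@example.com", "limit": 1000}

    QueryBuilder.build_filter_equals("status", "ACTIVE")
    # {"status": {"eq": "ACTIVE"}}

One caveat worth noting: build_list_query pluralizes naively (f"list{model_name}s"), while QueryExecutor resolves the actual list query name through SchemaIntrospector.get_list_query_name, so irregular plurals are handled at the call site rather than here.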
amplify_excel_migrator/migration/__init__.py (new file)
@@ -0,0 +1,8 @@
+ """Migration workflow components."""
+
+ from .failure_tracker import FailureTracker
+ from .progress_reporter import ProgressReporter
+ from .batch_uploader import BatchUploader
+ from .orchestrator import MigrationOrchestrator
+
+ __all__ = ["FailureTracker", "ProgressReporter", "BatchUploader", "MigrationOrchestrator"]
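
Note for reviewers: the new migration package re-exports its workflow classes, so callers import them from the package root:

    from amplify_excel_migrator.migration import (
        BatchUploader,
        FailureTracker,
        MigrationOrchestrator,
        ProgressReporter,
    )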