amplify-excel-migrator 1.0.0__py3-none-any.whl


amplify_excel_migrator-1.0.0.dist-info/METADATA ADDED
@@ -0,0 +1,204 @@
1
+ Metadata-Version: 2.4
2
+ Name: amplify-excel-migrator
3
+ Version: 1.0.0
4
+ Summary: A CLI tool to migrate Excel data to AWS Amplify
5
+ Home-page: https://github.com/EyalPoly/amplify-excel-migrator
6
+ Author: Eyal Politansky
7
+ Author-email: 10eyal10@gmail.com
8
+ Classifier: Programming Language :: Python :: 3
9
+ Classifier: License :: OSI Approved :: MIT License
10
+ Classifier: Operating System :: OS Independent
11
+ Requires-Python: >=3.8
12
+ Description-Content-Type: text/markdown
13
+ License-File: LICENSE
14
+ Requires-Dist: pandas>=1.3.0
15
+ Requires-Dist: requests>=2.26.0
16
+ Requires-Dist: boto3>=1.18.0
17
+ Requires-Dist: pycognito>=2023.5.0
18
+ Requires-Dist: PyJWT>=2.0.0
19
+ Requires-Dist: aiohttp>=3.8.0
20
+ Requires-Dist: openpyxl>=3.0.0
21
+ Provides-Extra: dev
22
+ Requires-Dist: pytest>=8.0.0; extra == "dev"
23
+ Requires-Dist: pytest-cov>=4.1.0; extra == "dev"
24
+ Requires-Dist: pytest-mock>=3.12.0; extra == "dev"
25
+ Requires-Dist: setuptools>=80.0.0; extra == "dev"
26
+ Requires-Dist: wheel>=0.40.0; extra == "dev"
27
+ Requires-Dist: twine>=4.0.0; extra == "dev"
28
+ Dynamic: author
29
+ Dynamic: author-email
30
+ Dynamic: classifier
31
+ Dynamic: description
32
+ Dynamic: description-content-type
33
+ Dynamic: home-page
34
+ Dynamic: license-file
35
+ Dynamic: provides-extra
36
+ Dynamic: requires-dist
37
+ Dynamic: requires-python
38
+ Dynamic: summary
39
+
40
+ # Amplify Excel Migrator
41
+
42
+ A CLI tool to migrate data from Excel files to an AWS Amplify GraphQL API.
43
+ Developed for the MECO project: https://github.com/sworgkh/meco-observations-amplify
44
+
45
+ ## Installation
46
+
47
+ ### From GitHub
48
+
49
+ Install directly from GitHub:
50
+
51
+ ```bash
52
+ pip install git+https://github.com/EyalPoly/amplify-excel-migrator.git
53
+ ```
54
+
55
+ ### From Source
56
+
57
+ Clone the repository and install:
58
+
59
+ ```bash
60
+ git clone https://github.com/EyalPoly/amplify-excel-migrator.git
61
+ cd amplify-excel-migrator
62
+ pip install .
63
+ ```
64
+
65
+ ### For Development
66
+
67
+ Install with development dependencies:
68
+
69
+ ```bash
70
+ pip install -e ".[dev]"
71
+ ```
72
+
73
+ This installs the package in editable mode with pytest and other development tools.
74
+
75
+ ## Usage
76
+
77
+ The tool has three subcommands:
78
+
79
+ ### 1. Configure (First Time Setup)
80
+
81
+ Save your AWS Amplify configuration:
82
+
83
+ ```bash
84
+ amplify-migrator config
85
+ ```
86
+
87
+ This will prompt you for:
88
+ - Excel file path
89
+ - AWS Amplify API endpoint
90
+ - AWS Region
91
+ - Cognito User Pool ID
92
+ - Cognito Client ID
93
+ - Admin username
94
+
95
+ Configuration is saved to `~/.amplify-migrator/config.json` (passwords are never saved).
96
+
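+ The cached file holds only the non-secret values you enter (the keys match what `amplify-migrator config` writes); a representative example, reusing the sample values shown further down:
+
+ ```
+ {
+   "excel_path": "my-data.xlsx",
+   "api_endpoint": "https://xxx.appsync-api.us-east-1.amazonaws.com/graphql",
+   "region": "us-east-1",
+   "user_pool_id": "us-east-1_xxxxx",
+   "client_id": "your-client-id",
+   "username": "admin@example.com"
+ }
+ ```
+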
97
+ ### 2. Show Configuration
98
+
99
+ View your current saved configuration:
100
+
101
+ ```bash
102
+ amplify-migrator show
103
+ ```
104
+
105
+ ### 3. Run Migration
106
+
107
+ Run the migration using your saved configuration:
108
+
109
+ ```bash
110
+ amplify-migrator migrate
111
+ ```
112
+
113
+ You'll only be prompted for your password (for security, passwords are never cached).
114
+
115
+ ### Quick Start
116
+
117
+ ```bash
118
+ # First time: configure the tool
119
+ amplify-migrator config
120
+
121
+ # View current configuration
122
+ amplify-migrator show
123
+
124
+ # Run migration (uses saved config)
125
+ amplify-migrator migrate
126
+
127
+ # View help
128
+ amplify-migrator --help
129
+ ```
130
+
131
+ ### Example: Configuration
132
+
133
+ ```
134
+ ╔════════════════════════════════════════════════════╗
135
+ ║ Amplify Migrator - Configuration Setup ║
136
+ ╚════════════════════════════════════════════════════╝
137
+
138
+ 📋 Configuration Setup:
139
+ ------------------------------------------------------
140
+ Excel file path [data.xlsx]: my-data.xlsx
141
+ AWS Amplify API endpoint: https://xxx.appsync-api.us-east-1.amazonaws.com/graphql
142
+ AWS Region [us-east-1]:
143
+ Cognito User Pool ID: us-east-1_xxxxx
144
+ Cognito Client ID: your-client-id
145
+ Admin Username: admin@example.com
146
+
147
+ ✅ Configuration saved successfully!
148
+ 💡 You can now run 'amplify-migrator migrate' to start the migration.
149
+ ```
150
+
151
+ ### Example: Migration
152
+
153
+ ```
154
+ ╔════════════════════════════════════════════════════╗
155
+ ║ Migrator Tool for Amplify ║
156
+ ╠════════════════════════════════════════════════════╣
157
+ ║ This tool requires admin privileges to execute ║
158
+ ╚════════════════════════════════════════════════════╝
159
+
160
+ 🔐 Authentication:
161
+ ------------------------------------------------------
162
+ Admin Password: ********
163
+ ```
164
+
165
+ ## Requirements
166
+
167
+ - Python 3.8+
168
+ - AWS Amplify GraphQL API
169
+ - AWS Cognito User Pool
170
+ - Admin access to the Cognito User Pool
171
+
172
+ ## Features
173
+
174
+ - **Configuration caching** - Save your setup, reuse it for multiple migrations
175
+ - **Interactive prompts** - Easy step-by-step configuration
176
+ - **Custom types and enums** - Full support for Amplify custom types
177
+ - **Duplicate detection** - Automatically skips existing records
178
+ - **Async uploads** - Fast parallel uploads for better performance
179
+ - **MFA support** - Works with multi-factor authentication
180
+ - **Automatic type parsing** - Smart field type detection and conversion (see the sketch after this list)
181
+
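+ As an illustration of the type parsing, the bundled `ModelFieldParser` unwraps GraphQL introspection wrappers such as `NON_NULL` and `LIST` to find each field's base type and whether it is required. A condensed sketch of that idea (the introspection fragment below is a made-up example, not output from a real API):
+
+ ```python
+ from typing import Dict
+
+
+ def base_type_name(type_obj: Dict) -> str:
+     # Unwrap NON_NULL / LIST wrappers until a named type is reached
+     if not type_obj:
+         return 'Unknown'
+     if type_obj.get('name'):
+         return type_obj['name']
+     if type_obj.get('ofType'):
+         return base_type_name(type_obj['ofType'])
+     return 'Unknown'
+
+
+ # A required String field (String! in the schema) as introspection reports it
+ field_type = {'kind': 'NON_NULL', 'name': None,
+               'ofType': {'kind': 'SCALAR', 'name': 'String', 'ofType': None}}
+ print(base_type_name(field_type))         # String
+ print(field_type['kind'] == 'NON_NULL')   # True -> required field
+ ```
+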
182
+ ## Excel File Format
183
+
184
+ The Excel file should have:
185
+ - One sheet per Amplify model (sheet name must match model name)
186
+ - Column names matching the model field names
187
+ - First row as headers
188
+
189
+ ### Example Excel Structure
190
+
191
+ **Sheet: User**
192
+ | name | email | age |
193
+ |------|-------|-----|
194
+ | John | john@example.com | 30 |
195
+ | Jane | jane@example.com | 25 |
196
+
197
+ **Sheet: Post**
198
+ | title | content | userId |
199
+ |-------|---------|--------|
200
+ | First Post | Hello World | john@example.com |
201
+
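+ When `amplify-migrator migrate` runs, every sheet is loaded in a single call and column headers are normalized to camelCase before they are matched against model fields. A minimal sketch of that flow (the `to_camel_case` helper mirrors the one in `migrator.py`; the file name is illustrative):
+
+ ```python
+ import re
+
+ import pandas as pd
+
+
+ def to_camel_case(s: str) -> str:
+     # "User Name", "user_name" and "UserName" all normalize to "userName"
+     s_with_spaces = re.sub(r'(?<!^)(?=[A-Z])', ' ', s)
+     parts = re.split(r'[\s_\-]+', s_with_spaces.strip())
+     return parts[0].lower() + ''.join(word.capitalize() for word in parts[1:])
+
+
+ # sheet_name=None returns a {sheet_name: DataFrame} dict, one entry per model
+ all_sheets = pd.read_excel("my-data.xlsx", sheet_name=None)
+ for sheet_name, df in all_sheets.items():
+     df.columns = [to_camel_case(c) for c in df.columns]
+     print(f"{sheet_name}: {len(df)} rows, columns {list(df.columns)}")
+ ```
+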
202
+ ## License
203
+
204
+ MIT
amplify_excel_migrator-1.0.0.dist-info/RECORD ADDED
@@ -0,0 +1,9 @@
1
+ amplify_client.py,sha256=fZ9LKCzEMOY3upNGaPCEuofVXI8IN2JemQFxTsQN6q8,26916
2
+ migrator.py,sha256=03YO5n6jymPA5bDr1Ks6FeSwi7bb8lZ_9LBjFSJCDi8,12081
3
+ model_field_parser.py,sha256=u7f55WYg6eRS-_iyq9swzxntqyUQMH9vaX3j-RUG76w,4328
4
+ amplify_excel_migrator-1.0.0.dist-info/licenses/LICENSE,sha256=i8Sf8mXscGI9l-HTQ5RLQkAJU6Iv5hPYctJksPY70U0,1071
5
+ amplify_excel_migrator-1.0.0.dist-info/METADATA,sha256=7qXRVir9VJq6NEM7B1WswHe0jx4YF95iutb2nPWMLog,5552
6
+ amplify_excel_migrator-1.0.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
7
+ amplify_excel_migrator-1.0.0.dist-info/entry_points.txt,sha256=Ifd7YnV4lNbjFbbnjsmlHWiIAfIpiC5POgJtxfSlDT4,51
8
+ amplify_excel_migrator-1.0.0.dist-info/top_level.txt,sha256=C-ffRe3F26GYiM7f6xy-pPvbwnh7Wnieyt-jS-cbdTU,43
9
+ amplify_excel_migrator-1.0.0.dist-info/RECORD,,
amplify_excel_migrator-1.0.0.dist-info/WHEEL ADDED
@@ -0,0 +1,5 @@
1
+ Wheel-Version: 1.0
2
+ Generator: setuptools (80.9.0)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
5
+
amplify_excel_migrator-1.0.0.dist-info/entry_points.txt ADDED
@@ -0,0 +1,2 @@
1
+ [console_scripts]
2
+ amplify-migrator = migrator:main
amplify_excel_migrator-1.0.0.dist-info/licenses/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2025 Eyal Politansky
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
amplify_excel_migrator-1.0.0.dist-info/top_level.txt ADDED
@@ -0,0 +1,3 @@
1
+ amplify_client
2
+ migrator
3
+ model_field_parser
migrator.py ADDED
@@ -0,0 +1,301 @@
1
+ import argparse
2
+ import json
3
+ import logging
4
+ import re
5
+ import sys
6
+ from getpass import getpass
7
+ from pathlib import Path
8
+ from typing import Any, Dict, List, Optional
9
+
10
+ import pandas as pd
11
+
12
+ from amplify_client import AmplifyClient
13
+ from model_field_parser import ModelFieldParser
14
+
15
+ logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
16
+ logger = logging.getLogger(__name__)
17
+
18
+ CONFIG_DIR = Path.home() / '.amplify-migrator'
19
+ CONFIG_FILE = CONFIG_DIR / 'config.json'
20
+
21
+
22
+ class ExcelToAmplifyMigrator:
23
+ def __init__(self, excel_file_path: str):
24
+ self.model_field_parser = ModelFieldParser()
25
+ self.excel_file_path = excel_file_path
26
+ self.amplify_client = None
27
+
28
+ def init_client(self, api_endpoint: str, region: str, user_pool_id: str, is_aws_admin: bool = False,
29
+ client_id: str = None, username: str = None, aws_profile: str = None):
30
+
31
+ self.amplify_client = AmplifyClient(
32
+ api_endpoint=api_endpoint,
33
+ user_pool_id=user_pool_id,
34
+ region=region,
35
+ client_id=client_id,
36
+ )
37
+
38
+ try:
39
+ self.amplify_client.init_cognito_client(is_aws_admin=is_aws_admin, username=username,
40
+ aws_profile=aws_profile)
41
+
42
+ except Exception:
43
+ sys.exit(1)
44
+
45
+ def authenticate(self, username: str, password: str) -> bool:
46
+ return self.amplify_client.authenticate(username, password)
47
+
48
+ def run(self):
49
+ all_sheets = self.read_excel()
50
+
51
+ for sheet_name, df in all_sheets.items():
52
+ logger.info(f"Processing {sheet_name} sheet with {len(df)} rows")
53
+ self.process_sheet(df, sheet_name)
54
+
55
+ def read_excel(self) -> Dict[str, Any]:
56
+ logger.info(f"Reading Excel file: {self.excel_file_path}")
57
+ all_sheets = pd.read_excel(self.excel_file_path, sheet_name=None)
58
+
59
+ logger.info(f"Loaded {len(all_sheets)} sheets from Excel")
60
+ return all_sheets
61
+
62
+ def process_sheet(self, df: pd.DataFrame, sheet_name: str):
63
+ parsed_model_structure = self.get_parsed_model_structure(sheet_name)
64
+ records = self.transform_rows_to_records(df, parsed_model_structure)
65
+
66
+ # confirm = input(f"\nUpload {len(records)} records of {sheet_name} to Amplify? (yes/no): ")
67
+ # if confirm.lower() != 'yes':
68
+ # logger.info(f"Upload cancelled for {sheet_name} sheet")
69
+ # return
70
+
71
+ success_count, error_count = self.amplify_client.upload(records, sheet_name, parsed_model_structure)
72
+
73
+ logger.info(f"=== Upload of Excel sheet: {sheet_name} Complete ===")
74
+ logger.info(f"✅ Success: {success_count}")
75
+ logger.info(f"❌ Failed: {error_count}")
76
+ logger.info(f"📊 Total: {len(records)}")
77
+
78
+ def transform_rows_to_records(self, df: pd.DataFrame, parsed_model_structure: Dict[str, Any]) -> List[Any]:
79
+ records = []
80
+ df.columns = [self.to_camel_case(c) for c in df.columns]
81
+ for idx, row in df.iterrows():
82
+ try:
83
+ record = self.transform_row_to_record(row, parsed_model_structure)
84
+ if record:
85
+ records.append(record)
86
+ except Exception as e:
87
+ logger.error(f"Error transforming row {idx}: {e}")
88
+
89
+ logger.info(f"Prepared {len(records)} records for upload")
90
+
91
+ return records
92
+
93
+ def get_parsed_model_structure(self, sheet_name: str) -> Dict[str, Any]:
94
+ model_structure = self.amplify_client.get_model_structure(sheet_name)
95
+ return self.model_field_parser.parse_model_structure(model_structure)
96
+
97
+ def transform_row_to_record(self, row: pd.Series, parsed_model_structure: Dict[str, Any]) -> Optional[Dict[str, Any]]:
98
+ """Transform a DataFrame row to Amplify model format"""
99
+
100
+ model_record = {}
101
+
102
+ for field in parsed_model_structure['fields']:
103
+ value = self.parse_input(row, field, parsed_model_structure)
104
+ if value is not None:
105
+ model_record[field['name']] = value
106
+
107
+ return model_record
108
+
109
+ def parse_input(self, row: pd.Series, field: Dict[str, Any], parsed_model_structure: Dict[str, Any]) -> Any:
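+ """Resolve the value for one model field from the row: optional missing values return None, required missing values raise, and *Id relation fields are replaced with the related record's Amplify id."""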
110
+ field_name = field['name'][:-2] if field['is_id'] else field['name']
111
+ if field_name not in row.index or pd.isna(row[field_name]):
112
+ if field['is_required']:
113
+ raise ValueError(f"Required field '{field_name}' is missing in row {row.name}")
114
+ else:
115
+ return None
116
+
117
+ value = row.get(field['name'])
118
+ if field['is_id']:
119
+ related_model = (temp := field['name'][:-2])[0].upper() + temp[1:]
120
+ record = self.amplify_client.get_record(related_model, parsed_model_structure=parsed_model_structure,
121
+ value=value, fields=['id'])
122
+ if record:
123
+ if record['id'] is None and field['is_required']:
124
+ raise ValueError(f"{related_model}: {value} does not exist")
125
+ else:
126
+ value = record['id']
127
+ else:
128
+ raise ValueError(f"Error fetching related record {related_model}: {value}")
129
+
130
+ return value
131
+
132
+ @staticmethod
133
+ def to_camel_case(s: str) -> str:
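+ """Normalize a header such as 'User Name', 'user_name' or 'UserName' to 'userName' so Excel columns line up with model field names."""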
134
+ # Handle PascalCase
135
+ s_with_spaces = re.sub(r'(?<!^)(?=[A-Z])', ' ', s)
136
+
137
+ parts = re.split(r'[\s_\-]+', s_with_spaces.strip())
138
+ return parts[0].lower() + ''.join(word.capitalize() for word in parts[1:])
139
+
140
+
141
+ def get_config_value(prompt: str, default: str = '', secret: bool = False) -> str:
142
+ if default:
143
+ prompt = f"{prompt} [{default}]: "
144
+ else:
145
+ prompt = f"{prompt}: "
146
+
147
+ if secret:
148
+ value = getpass(prompt)
149
+ else:
150
+ value = input(prompt)
151
+
152
+ return value.strip() if value.strip() else default
153
+
154
+
155
+ def save_config(config: Dict[str, str]) -> None:
156
+ CONFIG_DIR.mkdir(parents=True, exist_ok=True)
157
+
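+ # Keep only non-secret keys so the admin password is never written to disk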
158
+ cache_config = {k: v for k, v in config.items() if k not in ['password', 'ADMIN_PASSWORD']}
159
+
160
+ with open(CONFIG_FILE, 'w') as f:
161
+ json.dump(cache_config, f, indent=2)
162
+
163
+ logger.info(f"✅ Configuration saved to {CONFIG_FILE}")
164
+
165
+
166
+ def load_cached_config() -> Dict[str, str]:
167
+ if not CONFIG_FILE.exists():
168
+ return {}
169
+
170
+ try:
171
+ with open(CONFIG_FILE, 'r') as f:
172
+ return json.load(f)
173
+ except Exception as e:
174
+ logger.warning(f"Failed to load cached config: {e}")
175
+ return {}
176
+
177
+
178
+ def get_cached_or_prompt(key: str, prompt: str, cached_config: Dict, default: str = '', secret: bool = False) -> str:
179
+ if key in cached_config:
180
+ return cached_config[key]
181
+
182
+ return get_config_value(prompt, default, secret)
183
+
184
+
185
+ def cmd_show(args=None):
186
+ print("""
187
+ ╔════════════════════════════════════════════════════╗
188
+ ║ Amplify Migrator - Current Configuration ║
189
+ ╚════════════════════════════════════════════════════╝
190
+ """)
191
+
192
+ cached_config = load_cached_config()
193
+
194
+ if not cached_config:
195
+ print("\n❌ No configuration found!")
196
+ print("💡 Run 'amplify-migrator config' first to set up your configuration.")
197
+ return
198
+
199
+ print("\n📋 Cached Configuration:")
200
+ print("-" * 54)
201
+ print(f"Excel file path: {cached_config.get('excel_path', 'N/A')}")
202
+ print(f"API endpoint: {cached_config.get('api_endpoint', 'N/A')}")
203
+ print(f"AWS Region: {cached_config.get('region', 'N/A')}")
204
+ print(f"User Pool ID: {cached_config.get('user_pool_id', 'N/A')}")
205
+ print(f"Client ID: {cached_config.get('client_id', 'N/A')}")
206
+ print(f"Admin Username: {cached_config.get('username', 'N/A')}")
207
+ print("-" * 54)
208
+ print(f"\n📍 Config location: {CONFIG_FILE}")
209
+ print(f"💡 Run 'amplify-migrator config' to update configuration.")
210
+
211
+
212
+ def cmd_config(args=None):
213
+ print("""
214
+ ╔════════════════════════════════════════════════════╗
215
+ ║ Amplify Migrator - Configuration Setup ║
216
+ ╚════════════════════════════════════════════════════╝
217
+ """)
218
+
219
+ config = {'excel_path': get_config_value('Excel file path', 'data.xlsx'),
220
+ 'api_endpoint': get_config_value('AWS Amplify API endpoint'),
221
+ 'region': get_config_value('AWS Region', 'us-east-1'),
222
+ 'user_pool_id': get_config_value('Cognito User Pool ID'),
223
+ 'client_id': get_config_value('Cognito Client ID'),
224
+ 'username': get_config_value('Admin Username')}
225
+
226
+ save_config(config)
227
+ print("\n✅ Configuration saved successfully!")
228
+ print(f"💡 You can now run 'amplify-migrator migrate' to start the migration.")
229
+
230
+
231
+ def cmd_migrate(args=None):
232
+ print("""
233
+ ╔════════════════════════════════════════════════════╗
234
+ ║ Migrator Tool for Amplify ║
235
+ ╠════════════════════════════════════════════════════╣
236
+ ║ This tool requires admin privileges to execute ║
237
+ ╚════════════════════════════════════════════════════╝
238
+ """)
239
+
240
+ cached_config = load_cached_config()
241
+
242
+ if not cached_config:
243
+ print("\n❌ No configuration found!")
244
+ print("💡 Run 'amplify-migrator config' first to set up your configuration.")
245
+ sys.exit(1)
246
+
247
+ excel_path = get_cached_or_prompt('excel_path', 'Excel file path', cached_config, 'data.xlsx')
248
+ api_endpoint = get_cached_or_prompt('api_endpoint', 'AWS Amplify API endpoint', cached_config)
249
+ region = get_cached_or_prompt('region', 'AWS Region', cached_config, 'us-east-1')
250
+ user_pool_id = get_cached_or_prompt('user_pool_id', 'Cognito User Pool ID', cached_config)
251
+ client_id = get_cached_or_prompt('client_id', 'Cognito Client ID', cached_config)
252
+ username = get_cached_or_prompt('username', 'Admin Username', cached_config)
253
+
254
+ print("\n🔐 Authentication:")
255
+ print("-" * 54)
256
+ password = get_config_value('Admin Password', secret=True)
257
+
258
+ migrator = ExcelToAmplifyMigrator(excel_path)
259
+ migrator.init_client(api_endpoint, region, user_pool_id, client_id=client_id,
260
+ username=username)
261
+ if not migrator.authenticate(username, password):
262
+ return
263
+
264
+ migrator.run()
265
+
266
+
267
+ def main():
268
+ parser = argparse.ArgumentParser(
269
+ description='Amplify Excel Migrator - Migrate Excel data to AWS Amplify GraphQL API',
270
+ formatter_class=argparse.RawDescriptionHelpFormatter
271
+ )
272
+
273
+ subparsers = parser.add_subparsers(dest='command', help='Available commands')
274
+
275
+ config_parser = subparsers.add_parser('config', help='Configure the migration tool')
276
+ config_parser.set_defaults(func=cmd_config)
277
+
278
+ show_parser = subparsers.add_parser('show', help='Show current configuration')
279
+ show_parser.set_defaults(func=cmd_show)
280
+
281
+ migrate_parser = subparsers.add_parser('migrate', help='Run the migration')
282
+ migrate_parser.set_defaults(func=cmd_migrate)
283
+
284
+ args = parser.parse_args()
285
+
286
+ if args.command is None:
287
+ parser.print_help()
288
+ sys.exit(1)
289
+
290
+ args.func(args)
291
+
292
+
293
+ if __name__ == "__main__":
294
+ # For IDE debugging: set the command you want to test
295
+ # Uncomment and modify one of these lines:
296
+
297
+ # sys.argv = ['migrator.py', 'config'] # Test config command
298
+ # sys.argv = ['migrator.py', 'show'] # Test show command
299
+ # sys.argv = ['migrator.py', 'migrate'] # Test migrate command
300
+
301
+ main()
model_field_parser.py ADDED
@@ -0,0 +1,134 @@
1
+ from typing import Dict, Any
2
+
3
+
4
+ class ModelFieldParser:
5
+ """Parse GraphQL model fields from introspection results"""
6
+
7
+ def __init__(self):
8
+ self.scalar_types = {
9
+ 'String', 'Int', 'Float', 'Boolean',
10
+ 'AWSDate', 'AWSTime', 'AWSDateTime', 'AWSTimestamp',
11
+ 'AWSEmail', 'AWSJSON', 'AWSURL', 'AWSPhone', 'AWSIPAddress'
12
+ }
13
+ self.metadata_fields = {'id', 'createdAt', 'updatedAt', 'owner'}
14
+
15
+ def parse_model_structure(self, introspection_result: Dict) -> Dict[str, Any]:
16
+ if 'data' in introspection_result and '__type' in introspection_result['data']:
17
+ type_data = introspection_result['data']['__type']
18
+ else:
19
+ type_data = introspection_result
20
+
21
+ model_info = {
22
+ 'name': type_data.get('name'),
23
+ 'kind': type_data.get('kind'),
24
+ 'description': type_data.get('description'),
25
+ 'fields': []
26
+ }
27
+
28
+ if type_data.get('fields'):
29
+ for field in type_data['fields']:
30
+ parsed_field = self._parse_field(field)
31
+ if parsed_field:
+ model_info['fields'].append(parsed_field)
32
+
33
+ return model_info
34
+
35
+ def _parse_field(self, field: Dict) -> Dict[str, Any]:
36
+ base_type = self._get_base_type_name(field.get('type', {}))
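+ # Exclude relation/Connection fields, auto-managed metadata fields (id, createdAt, updatedAt, owner) and nested object/interface types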
37
+ if 'Connection' in base_type or field.get('name') in self.metadata_fields or self._get_type_kind(
38
+ field.get('type', {})) in ['OBJECT', 'INTERFACE']:
39
+ return {}
40
+
41
+ field_info = {
42
+ 'name': field.get('name'),
43
+ 'description': field.get('description'),
44
+ 'type': base_type,
45
+ 'is_required': self._is_required_field(field.get('type', {})),
46
+ 'is_list': self._is_list_type(field.get('type', {})),
47
+ 'is_scalar': base_type in self.scalar_types,
48
+ 'is_id': base_type == 'ID',
49
+ 'is_enum': self._get_type_kind(field.get('type', {})) == 'ENUM',
50
+ }
51
+
52
+ return field_info
53
+
54
+ def _parse_type(self, type_obj: Dict) -> Dict[str, Any]:
55
+ """
56
+ Recursively parse type information
57
+ """
58
+
59
+ if not type_obj:
60
+ return {'name': 'Unknown', 'kind': 'UNKNOWN'}
61
+
62
+ type_info = {
63
+ 'name': type_obj.get('name'),
64
+ 'kind': type_obj.get('kind'),
65
+ 'full_type': self._get_full_type_string(type_obj)
66
+ }
67
+
68
+ # If there's nested type info (NON_NULL, LIST), include it
69
+ if type_obj.get('ofType'):
70
+ type_info['of_type'] = self._parse_type(type_obj['ofType'])
71
+
72
+ return type_info
73
+
74
+ def _get_full_type_string(self, type_obj: Dict) -> str:
75
+ """
76
+ Get human-readable type string (e.g., '[String!]!')
77
+ """
78
+
79
+ if not type_obj:
80
+ return 'Unknown'
81
+
82
+ if type_obj.get('name'):
83
+ return type_obj['name']
84
+
85
+ if type_obj['kind'] == 'NON_NULL':
86
+ inner = self._get_full_type_string(type_obj.get('ofType', {}))
87
+ return f"{inner}!"
88
+
89
+ if type_obj['kind'] == 'LIST':
90
+ inner = self._get_full_type_string(type_obj.get('ofType', {}))
91
+ return f"[{inner}]"
92
+
93
+ return type_obj.get('kind', 'Unknown')
94
+
95
+ def _get_base_type_name(self, type_obj: Dict) -> str:
96
+ """
97
+ Get the base type name, unwrapping NON_NULL and LIST wrappers
98
+ """
99
+
100
+ if not type_obj:
101
+ return 'Unknown'
102
+
103
+ if type_obj.get('name'):
104
+ return type_obj['name']
105
+
106
+ if type_obj.get('ofType'):
107
+ return self._get_base_type_name(type_obj['ofType'])
108
+
109
+ return 'Unknown'
110
+
111
+ def _get_type_kind(self, type_obj: Dict) -> str:
112
+ if not type_obj:
113
+ return 'UNKNOWN'
114
+
115
+ if type_obj['kind'] in ['NON_NULL', 'LIST'] and type_obj.get('ofType'):
116
+ return self._get_type_kind(type_obj['ofType'])
117
+
118
+ return type_obj.get('kind', 'UNKNOWN')
119
+
120
+ @staticmethod
121
+ def _is_required_field(type_obj: Dict) -> bool:
122
+ return bool(type_obj and type_obj.get('kind') == 'NON_NULL')
123
+
124
+ def _is_list_type(self, type_obj: Dict) -> bool:
125
+ if not type_obj:
126
+ return False
127
+
128
+ if type_obj['kind'] == 'LIST':
129
+ return True
130
+
131
+ if type_obj.get('ofType'):
132
+ return self._is_list_type(type_obj['ofType'])
133
+
134
+ return False