amsdal_cli 0.5.2__py3-none-any.whl → 0.5.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,499 @@
+ import logging
+ import time
+ from collections.abc import Iterator
+ from typing import Any
+
+ import httpx
+ import jwt
+ import typer
+ from rich import print as rprint
+ from starlette import status
+
+ from amsdal_cli.commands.api_check.config import ApiCheckConfig
+ from amsdal_cli.commands.api_check.data_classes import ClassItem
+ from amsdal_cli.commands.api_check.data_classes import Transaction
+ from amsdal_cli.commands.api_check.operation_log import OperationLog
+ from amsdal_cli.commands.api_check.services.data_factory import DataFactory
+
+ logger = logging.getLogger(__name__)
+
+
+ class ApiRunner:
+     def __init__(self, base_url: str, config: ApiCheckConfig) -> None:
+         self.base_url = base_url
+         self.config = config
+         self.logs: list[OperationLog] = []
+
+     def authenticate(self) -> None:
+         """
+         Authenticate using login credentials and store the token in the config.
+         Uses credentials from environment variables if available, otherwise from config.
+         """
+         # Get email and password from environment variables or config
+         email = self.config.env_email or self.config.email
+         password = self.config.env_password or self.config.password
+
+         if not email or not password:
+             return
+
+         rprint(f'[blue]Authenticating with login credentials on {self.base_url}[/blue]')
+         payload = {'email': email, 'password': password}
+
+         try:
+             response = httpx.post(
+                 f"{self.base_url.rstrip('/')}/objects/",
+                 params={'class_name': 'LoginSession', 'load_references': 'false'},
+                 json=payload,
+                 timeout=self.config.request_timeout,
+                 headers=self.config.headers,
+             )
+             response.raise_for_status()
+             data = response.json()
+
+             if 'token' in data:
+                 self.config.token = data['token']
+                 # Token expiry is automatically extracted from the token
+                 try:
+                     # Decode token just to verify it's valid
+                     jwt.decode(data['token'], options={'verify_signature': False})
+                     # Use the token_expiry property to get the expiry time
+                     rprint(
+                         f'[green]Authentication successful. '
+                         f'Token expires at: {time.ctime(self.config.token_expiry)}[/green]'
+                     )
+                     # Save the token to the config file using the config's save method
+                     self.config.save()
+                 except Exception as e:
+                     logger.warning(f'Failed to decode token: {e}')
+             else:
+                 rprint('[red]Authentication failed: No token in response[/red]')
+                 # Exit with error code if authentication failed
+                 raise typer.Exit(code=1)
+         except httpx.HTTPStatusError as e:
+             rprint(f'[red]Authentication failed: HTTP error {e.response.status_code} - {e}[/red]')
+             # Exit with error code for HTTP errors
+             raise typer.Exit(code=1) from e
+         except Exception as e:
+             rprint(f'[red]Authentication failed: {e}[/red]')
+             # Exit with error code for other exceptions
+             raise typer.Exit(code=1) from e
+
+     def is_token_valid(self) -> bool:
+         """
+         Check if the stored token is valid (not expired).
+         """
+         if not self.config.token or not self.config.token_expiry:
+             return False
+
+         current_time = int(time.time())
+         # Add a 30-second buffer to ensure we don't use a token that's about to expire
+         return current_time < (self.config.token_expiry - 30)
+
+     def run(self) -> list[OperationLog]:
+         self.logs.clear()
+         rprint('[blue]Running API checks[/blue]', end='\n' if self.config.extend_output else '')
+
+         # Check if token is valid
+         if not self.is_token_valid():
+             # Get email and password from environment variables or config
+             email = self.config.env_email or self.config.email
+             password = self.config.env_password or self.config.password
+
+             # If credentials are available, authenticate
+             if email and password:
+                 self.authenticate()
+             # If token is invalid and no credentials are available, but we need authentication
+             elif self.config.env_authorization or self.config.auth_headers:
+                 # Only raise error if we were trying to use authentication
+                 rprint(
+                     '[red]Error: Token is invalid and no credentials (email/password) '
+                     'are available for authentication.[/red]'
+                 )
+                 rprint(
+                     '[red]Please provide valid credentials via environment variables '
+                     '(AMSDAL_API_CHECK_EMAIL, AMSDAL_API_CHECK_PASSWORD) or config file.[/red]'
+                 )
+                 import typer
+
+                 raise typer.Exit(code=1)
+
+         classes = list(self.get_class_list(with_auth=False))
+         _classes = list(self.get_class_list(with_auth=True))
+
+         if _classes:
+             classes = _classes
+
+         cls: ClassItem
+
+         for cls in self._iterate_items_per_list(classes):
+             self.get_class_detail(cls, with_auth=False)
+             self.get_class_detail(cls, with_auth=True)
+
+             objects = list(self.get_object_list(cls))
+
+             for obj in self._iterate_items_per_list(objects):
+                 self.get_object_detail(obj)
+                 self.get_object_detail(obj, with_auth=True)
+
+             if self.config.object_write_operations_enabled:
+                 self.check_write_operations(cls)
+                 self.check_write_operations(cls, with_auth=True)
+
+         # Get transactions without auth
+         transactions = self.get_transaction_list()
+
+         # Get transactions with auth if auth headers are available
+         if self.config.auth_headers:
+             transactions_with_auth = self.get_transaction_list(with_auth=True)
+             # Combine the lists, avoiding duplicates
+             for transaction in transactions_with_auth:
+                 if transaction not in transactions:
+                     transactions.append(transaction)
+
+         for transaction in self._iterate_transactions(transactions):
+             # Get transaction details without auth
+             self.get_transaction_detail(transaction, ignore_status_code=True)
+             self.check_transaction_execute(transaction, ignore_status_code=True)
+
+             # Get transaction details with auth if auth headers are available
+             if self.config.auth_headers:
+                 self.get_transaction_detail(transaction, with_auth=True, ignore_status_code=False)
+                 self.check_transaction_execute(transaction, with_auth=True)
+
+         return self.logs
+
+     def get_class_list(self, *, with_auth: bool = False) -> Iterator[ClassItem]:
+         if isinstance(self.config.exclude_classes, str) and self.config.exclude_classes == 'ALL':
+             return
+
+         data = self._request('GET', 'classes/', with_auth=with_auth)
+
+         for item in data['rows']:
+             cls_item = ClassItem(**item)
+
+             if cls_item.class_name in self.config.exclude_classes:
+                 continue
+
+             yield cls_item
+
+     def get_class_detail(self, class_item: ClassItem, *, with_auth: bool = False) -> None:
+         self._request('GET', f'classes/{class_item.class_name}/', with_auth=with_auth)
+
+     def get_object_list(self, class_item: ClassItem) -> Iterator[dict[str, Any]]:
+         if class_item.class_name in self.config.exclude_objects_for_classes:
+             return
+
+         if self.config.objects_list_params_options:
+             _params_options = self.config.objects_list_params_options
+         else:
+             _params_options = [
+                 {
+                     'include_metadata': False,
+                     'include_subclasses': False,
+                     'load_references': False,
+                     'all_versions': False,
+                     'file_optimized': False,
+                     'page_size': 15,
+                 },
+                 {
+                     'include_metadata': True,
+                     'include_subclasses': True,
+                     'load_references': False,
+                     'file_optimized': True,
+                     'page_size': 15,
+                 },
+             ]
+
+         for params in _params_options:
+             # without auth
+             self._request(
+                 'GET',
+                 'objects/',
+                 params={
+                     'class_name': class_item.class_name,
+                     **params,
+                 },
+             )
+
+             # with auth
+             data = self._request(
+                 'GET',
+                 'objects/',
+                 with_auth=True,
+                 params={
+                     'class_name': class_item.class_name,
+                     **params,
+                 },
+             )
+
+             yield from data['rows']
+
+     def get_object_detail(self, object_item: dict[str, Any], *, with_auth: bool = False) -> None:
+         if self.config.object_detail_params_options:
+             _params_options = self.config.object_detail_params_options
+         else:
+             _params_options = [
+                 {
+                     'all_versions': False,
+                     'include_metadata': False,
+                     'file_optimized': False,
+                 },
+                 {
+                     'all_versions': False,
+                     'include_metadata': False,
+                     'file_optimized': True,
+                 },
+             ]
+         address = object_item.get('_metadata', {}).get('lakehouse_address')
+
+         if not address:
+             logger.warning(f'No lakehouse address for object: {object_item}')
+
+         for params in _params_options:
+             self._request('GET', f'objects/{address}/', params=params, with_auth=with_auth)
+
+     def check_write_operations(self, class_item: ClassItem, *, with_auth: bool = False) -> None:
+         if class_item.class_name in self.config.exclude_object_write_operations_for_classes:
+             return
+
+         # Create a sample object for testing
+         sample_data = DataFactory.build_data(class_item)
+
+         create_response = self._request('POST', 'objects/', json=sample_data, with_auth=with_auth)
+
+         if create_response.status_code != httpx.codes.OK:
+             return
+
+         # Get the address of the created object
+         address = create_response.get('_metadata', {}).get('lakehouse_address')
+
+         if not address:
+             logger.warning(f'Failed to create object for class: {class_item.class_name}')
+             return
+
+         response = self._request('GET', f'objects/{address}/', with_auth=with_auth)
+
+         if response.status_code != httpx.codes.OK:
+             return
+
+         data = response['rows'][0]
+         update_data = DataFactory.build_update_data(class_item, data)
+
+         response = self._request('PUT', f'objects/{address}/', json=update_data, with_auth=with_auth)
+
+         if response.status_code != httpx.codes.OK:
+             return
+
+         self._request('DELETE', f'objects/{address}/', with_auth=with_auth)
+
+     def _iterate_items_per_list(self, items: list[Any]) -> Iterator[Any]:
+         if not items:
+             return
+
+         start, middle, end = self.config.items_per_list
+         total_items = len(items)
+         requested_items = start + middle + end
+
+         # If list is shorter than or equal to requested items, yield all
+         if total_items <= requested_items:
+             yield from items
+             return
+
+         # Yield start items
+         yield from items[:start]
+
+         # Calculate middle section
+         remaining_items = total_items - start - end
+         if remaining_items > 0 and middle > 0:
+             middle_start = start + (remaining_items - middle) // 2
+             middle_end = middle_start + middle
+             yield from items[middle_start:middle_end]
+
+         # Yield end items
+         if end > 0:
+             yield from items[-end:]
+
+     def get_transaction_list(self, *, with_auth: bool = False) -> list[Transaction]:
+         """
+         Get the list of transactions.
+
+         Args:
+             with_auth: Whether to include authentication headers in the request
+
+         Returns:
+             A list of Transaction objects
+         """
+         transactions = []
+
+         data = self._request('GET', 'transactions/', with_auth=with_auth)
+
+         # Only process data if we got a valid response with rows
+         if isinstance(data, dict) and 'rows' in data:
+             for item in data['rows']:
+                 transactions.append(Transaction(**item))
+
+         return transactions
+
+     def _iterate_transactions(self, transactions: list[Transaction]) -> Iterator[Transaction]:
+         if not transactions:
+             return
+
+         if isinstance(self.config.exclude_transactions, str):
+             if self.config.exclude_transactions == 'ALL':
+                 return
+             else:
+                 msg = 'Unknown exclude_transactions value. Expected "ALL" or list of transaction names.'
+                 raise ValueError(msg)
+
+         for transaction in transactions:
+             if transaction.title in self.config.exclude_transactions:
+                 continue
+
+             yield transaction
+
+     def get_transaction_detail(
+         self,
+         transaction: Transaction,
+         *,
+         with_auth: bool = False,
+         ignore_status_code: bool = False,
+     ) -> None:
+         """
+         Get details for a transaction.
+
+         Args:
+             transaction: The transaction to get details for
+             with_auth: Whether to include authentication headers in the request
+             ignore_status_code: Whether to ignore status codes other than 200 and 201 when checking
+         """
+         # Make a request to get transaction details
+         self._request(
+             'GET',
+             f'transactions/{transaction.title}/',
+             with_auth=with_auth,
+             ignore_status_code=ignore_status_code,
+         )
+
+     def check_transaction_execute(
+         self,
+         transaction: Transaction,
+         *,
+         with_auth: bool = False,
+         ignore_status_code: bool = False,
+     ) -> None:
+         """
+         Execute a transaction.
+
+         Args:
+             transaction: The transaction to execute
+             with_auth: Whether to include authentication headers in the request
+             ignore_status_code: Whether to ignore status codes other than 200 and 201 when checking
+                 transaction execution.
+         """
+         # Check if transaction should be excluded
+         if isinstance(self.config.exclude_execute_transactions, str):
+             if self.config.exclude_execute_transactions == 'ALL':
+                 return
+             else:
+                 msg = 'Unknown exclude_execute_transactions value. Expected "ALL" or list of transaction names.'
+                 raise ValueError(msg)
+
+         if transaction.title in self.config.exclude_execute_transactions:
+             return
+
+         # Find transaction data in config or generate it
+         transaction_data_list = [td for td in self.config.transactions_data if td.transaction_name == transaction.title]
+
+         # If no data found in config, generate a set of parameters using DataFactory
+         if not transaction_data_list:
+             # Generate parameters based on transaction properties
+             params = {}
+             for key, prop in transaction.properties.items():
+                 # Use DataFactory to generate appropriate values based on property type
+                 if isinstance(prop, dict) and 'type' in prop:
+                     params[key] = DataFactory.generate_value_for_type(prop['type'], prop)
+                 else:
+                     # If type is not specified, use a simple string
+                     params[key] = f'test_{key}'
+
+             # Execute the transaction with generated parameters
+             self._request(
+                 'POST',
+                 f'transactions/{transaction.title}/',
+                 json=params,
+                 with_auth=with_auth,
+                 ignore_status_code=ignore_status_code,
+             )
+         else:
+             # Execute the transaction with each set of parameters from config
+             for td in transaction_data_list:
+                 self._request(
+                     'POST',
+                     f'transactions/{transaction.title}/',
+                     json=td.input_params,
+                     with_auth=with_auth,
+                     ignore_status_code=self.config.ignore_transaction_execution_errors,
+                 )
+
+     def _request(
+         self,
+         method: str,
+         path: str,
+         *,
+         with_auth: bool = True,
+         ignore_status_code: bool = False,
+         **kwargs: Any,
+     ) -> Any:
+         base_url = self.base_url.rstrip('/')
+         path = path.lstrip('/')
+         url = f'{base_url}/{path}'
+         params: dict[str, Any] = {
+             'timeout': self.config.request_timeout,
+         }
+         params.update(kwargs)
+         _headers: dict[str, Any] = {**self.config.headers}
+
+         if with_auth:
+             # Priority order for authentication:
+             # 1. Environment variable
+             # 2. Token from login
+             # 3. Auth headers from config
+
+             if self.config.env_authorization:
+                 # Use token from environment variable
+                 _headers['Authorization'] = self.config.env_authorization
+             elif self.config.token:
+                 # Use token from login
+                 _headers['Authorization'] = self.config.token
+             else:
+                 # Fall back to auth_headers from config
+                 _headers.update(self.config.auth_headers)
+
+         _headers.update(kwargs.get('headers', {}))
+         params['headers'] = _headers
+         response = httpx.request(method, url, **params)
+
+         if not ignore_status_code:
+             response.raise_for_status()
+
+         log = OperationLog.from_response(
+             response,
+             auth_headers=self.config.auth_headers,
+             ignore_class_version=True,
+             ignore_object_version=True,
+         )
+         self.logs.append(log)
+
+         if self.config.extend_output:
+             color = 'green' if response.status_code < status.HTTP_400_BAD_REQUEST else 'yellow'
+             color = color if response.status_code < status.HTTP_500_INTERNAL_SERVER_ERROR else 'red'
+             rprint(f'[{color}]{response.status_code} {method} {path}[/{color}]')
+         else:
+             rprint('.', end='')
+
+         try:
+             data = response.json()
+         except Exception:
+             data = response.text
+
+         return data
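
The _iterate_items_per_list helper above keeps each check run bounded: instead of walking a whole list, it yields a handful of items from the start, the middle, and the end, driven by the items_per_list tuple on the config. A standalone sketch of the same selection logic (sample_items and the (2, 2, 2) defaults are illustrative, not names or values taken from the package):

    def sample_items(items: list, start: int = 2, middle: int = 2, end: int = 2) -> list:
        # Same idea as ApiRunner._iterate_items_per_list: short lists are returned
        # whole; longer lists contribute slices from the start, middle, and end.
        if len(items) <= start + middle + end:
            return list(items)
        remaining = len(items) - start - end
        middle_start = start + (remaining - middle) // 2
        return items[:start] + items[middle_start:middle_start + middle] + items[-end:]

    print(sample_items(list(range(20))))  # -> [0, 1, 9, 10, 18, 19]
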
@@ -0,0 +1,12 @@
+ import json
+ from pathlib import Path
+
+ from amsdal_cli.commands.api_check.operation_log import BytesJSONEncoder
+ from amsdal_cli.commands.api_check.operation_log import OperationLog
+
+
+ def save(logs: list[OperationLog], destination: Path) -> None:
+     destination.parent.mkdir(exist_ok=True, parents=True)
+     # Convert OperationLog objects to dictionaries before serialization
+     serializable_logs = [log.model_dump() for log in logs]
+     destination.write_text(json.dumps(serializable_logs, cls=BytesJSONEncoder))
@@ -47,9 +47,9 @@ class BuildMixin:
      def copy_class_models(models_source_path: Path) -> None:
          from amsdal.configs.main import settings

-         shutil.copytree(
+         shutil.copytree(  # type: ignore[type-var]
              models_source_path,
-             settings.USER_MODELS_MODULE_PATH,  # type: ignore[arg-type]
+             settings.USER_MODELS_MODULE_PATH,
              dirs_exist_ok=True,
          )

@@ -8,7 +8,7 @@ from amsdal_utils.config.manager import AmsdalConfigManager
  from rich import print as rprint
  from typer import Option

- COMMANDS_DO_NOT_REQUIRE_APP_PATH = ('new', 'n')
+ COMMANDS_DO_NOT_REQUIRE_APP_PATH = ('new', 'n', 'api-check')


  def init_app_context(
@@ -123,17 +123,17 @@ def _generate_values_for_type(

          if test_data_type == TestDataType.RANDOM:
              dt = FAKER.date_time(tzinfo=timezone.utc).replace(hour=0, minute=0, second=0, microsecond=0)
-             return ast.Constant(value=dt)
+             return ast.Constant(value=dt)  # type: ignore[arg-type]
          if test_data_type == TestDataType.DYNAMIC:
              return _faker_call('date_time')

      elif data_type == 'date':
          if test_data_type == TestDataType.DUMMY:
-             return ast.Constant(value=date(2023, 1, 1))
+             return ast.Constant(value=date(2023, 1, 1))  # type: ignore[arg-type]

          if test_data_type == TestDataType.RANDOM:
              dt = FAKER.date_time(tzinfo=timezone.utc).replace(hour=0, minute=0, second=0, microsecond=0)
-             return ast.Constant(value=dt.date())
+             return ast.Constant(value=dt.date())  # type: ignore[arg-type]

          if test_data_type == TestDataType.DYNAMIC:
              return _faker_call('date')
@@ -1,17 +1,13 @@
  import asyncio
- from collections import defaultdict
  from pathlib import Path
- from typing import TYPE_CHECKING
  from typing import Annotated

  import typer
- from amsdal_models.migration.data_classes import ModuleType
+ from amsdal_utils.models.enums import ModuleType
  from rich import print as rprint

  from amsdal_cli.commands.migrations.app import sub_app
-
- if TYPE_CHECKING:
-     from amsdal_models.migration.data_classes import MigrationFile
+ from amsdal_cli.commands.migrations.utils import render_migrations_list


  def _sync_apply(
@@ -81,13 +77,11 @@ def _sync_apply(

      if reverted:
          rprint(rich_warning('Migrations reverted'))
-
-         _render([item.migration for item in reverted], color='yellow')
+         render_migrations_list([item.migration for item in reverted], color='yellow', is_migrated=False)

      if applied:
          rprint(rich_success('Migrations applied'))
-
-         _render([item.migration for item in applied], color='green')
+         render_migrations_list([item.migration for item in applied], color='green', is_migrated=True)

      amsdal_manager.teardown()

@@ -160,48 +154,16 @@ async def _async_sync_apply(

          if reverted:
              rprint(rich_warning('Migrations reverted'))
-
-             _render([item.migration for item in reverted], color='yellow')
+             render_migrations_list([item.migration for item in reverted], color='yellow', is_migrated=False)

          if applied:
              rprint(rich_success('Migrations applied'))
-
-             _render([item.migration for item in applied], color='green')
+             render_migrations_list([item.migration for item in applied], color='green', is_migrated=True)
      finally:
          await amsdal_manager.teardown()
          AsyncAmsdalManager.invalidate()


- def _render(migrations: list['MigrationFile'], color: str = 'yellow') -> None:
-     migrations_per_type = defaultdict(list)
-
-     for migration in migrations:
-         migrations_per_type[migration.type].append(migration)
-
-     if ModuleType.CORE in migrations_per_type:
-         rprint(f'[{color}]Core:[/{color}]')
-
-         for migration in migrations_per_type[ModuleType.CORE]:
-             rprint(f' - [{color}]{migration.path.name}[/{color}]')
-
-     if ModuleType.CONTRIB in migrations_per_type:
-         rprint(f'[{color}]Contrib:[/{color}]')
-
-         for migration in migrations_per_type[ModuleType.CONTRIB]:
-             if migration.module:
-                 contrib_name = '.'.join(migration.module.split('.')[:-2])
-             else:
-                 contrib_name = 'N/A'
-
-             rprint(f' - [{color}]{contrib_name}: {migration.path.name}[/{color}]')
-
-     if ModuleType.USER in migrations_per_type:
-         rprint(f'[{color}]App:[/{color}]')
-
-         for migration in migrations_per_type[ModuleType.USER]:
-             rprint(f' - [{color}]{migration.path.name}[/{color}]')
-
-
  @sub_app.command(name='apply, apl, ap')
  def apply_migrations(
      ctx: typer.Context,