cornflow 2.0.0a13__py3-none-any.whl → 2.0.0a14__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. cornflow/app.py +3 -1
  2. cornflow/cli/__init__.py +4 -0
  3. cornflow/cli/actions.py +4 -0
  4. cornflow/cli/config.py +4 -0
  5. cornflow/cli/migrations.py +13 -8
  6. cornflow/cli/permissions.py +4 -0
  7. cornflow/cli/roles.py +4 -0
  8. cornflow/cli/schemas.py +5 -0
  9. cornflow/cli/service.py +260 -147
  10. cornflow/cli/tools/api_generator.py +13 -10
  11. cornflow/cli/tools/endpoint_tools.py +191 -196
  12. cornflow/cli/tools/models_tools.py +87 -60
  13. cornflow/cli/tools/schema_generator.py +161 -67
  14. cornflow/cli/tools/schemas_tools.py +4 -5
  15. cornflow/cli/users.py +8 -0
  16. cornflow/cli/views.py +4 -0
  17. cornflow/commands/dag.py +3 -2
  18. cornflow/commands/schemas.py +6 -4
  19. cornflow/commands/users.py +12 -17
  20. cornflow/config.py +3 -2
  21. cornflow/endpoints/dag.py +27 -25
  22. cornflow/endpoints/data_check.py +102 -164
  23. cornflow/endpoints/example_data.py +9 -3
  24. cornflow/endpoints/execution.py +27 -23
  25. cornflow/endpoints/health.py +4 -5
  26. cornflow/endpoints/instance.py +39 -12
  27. cornflow/endpoints/meta_resource.py +4 -5
  28. cornflow/shared/airflow.py +157 -0
  29. cornflow/shared/authentication/auth.py +73 -42
  30. cornflow/shared/const.py +9 -0
  31. cornflow/shared/databricks.py +10 -10
  32. cornflow/shared/exceptions.py +3 -1
  33. cornflow/shared/utils_tables.py +36 -8
  34. cornflow/shared/validators.py +1 -1
  35. cornflow/tests/custom_test_case.py +4 -4
  36. cornflow/tests/unit/test_alarms.py +1 -2
  37. cornflow/tests/unit/test_cases.py +4 -7
  38. cornflow/tests/unit/test_executions.py +29 -20
  39. cornflow/tests/unit/test_log_in.py +46 -9
  40. cornflow/tests/unit/test_tables.py +3 -3
  41. cornflow/tests/unit/tools.py +31 -13
  42. {cornflow-2.0.0a13.dist-info → cornflow-2.0.0a14.dist-info}/METADATA +2 -2
  43. {cornflow-2.0.0a13.dist-info → cornflow-2.0.0a14.dist-info}/RECORD +46 -45
  44. {cornflow-2.0.0a13.dist-info → cornflow-2.0.0a14.dist-info}/WHEEL +1 -1
  45. {cornflow-2.0.0a13.dist-info → cornflow-2.0.0a14.dist-info}/entry_points.txt +0 -0
  46. {cornflow-2.0.0a13.dist-info → cornflow-2.0.0a14.dist-info}/top_level.txt +0 -0
cornflow/endpoints/dag.py CHANGED
@@ -2,6 +2,7 @@
 Internal endpoint for getting and posting execution data
 These are the endpoints used by airflow in its communication with cornflow
 """
+
 # Import from libraries
 from cornflow_client.constants import SOLUTION_SCHEMA
 from flask import current_app
@@ -17,7 +18,6 @@ from cornflow.schemas.execution import (
     ExecutionDagPostRequest,
     ExecutionDagRequest,
     ExecutionDetailsEndpointResponse,
-    ExecutionSchema,
 )
 
 from cornflow.shared.authentication import Auth, authenticate
@@ -85,7 +85,7 @@ class DAGDetailEndpoint(BaseMetaResource):
     @doc(description="Edit an execution", tags=["DAGs"])
     @authenticate(auth_class=Auth())
     @use_kwargs(ExecutionDagRequest, location="json")
-    def put(self, idx, **req_data):
+    def put(self, idx, **kwargs):
         """
         API method to write the results of the execution
         It requires authentication to be passed in the form of a token that has to be linked to
@@ -95,13 +95,20 @@ class DAGDetailEndpoint(BaseMetaResource):
         :return: A dictionary with a message (body) and an integer with the HTTP status code
         :rtype: Tuple(dict, integer)
         """
-        solution_schema = req_data.pop("solution_schema", "pulp")
+        execution = ExecutionModel.get_one_object(user=self.get_user(), idx=idx)
+        if execution is None:
+            err = "The execution does not exist."
+            raise ObjectDoesNotExist(
+                error=err,
+                log_txt=f"Error while user {self.get_user()} tries to edit execution {idx}."
+                + err,
+            )
+
+        solution_schema = execution.schema
 
-        # TODO: the solution_schema maybe we should get it from the created execution_id?
-        # at least, check they have the same schema-name
         # Check data format
-        data = req_data.get("data")
-        checks = req_data.get("checks")
+        data = kwargs.get("data")
+        checks = kwargs.get("checks")
         if data is None:
             # only check format if executions_results exist
             solution_schema = None
@@ -111,24 +118,19 @@ class DAGDetailEndpoint(BaseMetaResource):
         if solution_schema is not None:
             config = current_app.config
 
-            solution_schema = DeployedOrch.get_one_schema(config, solution_schema, SOLUTION_SCHEMA)
+            solution_schema = DeployedOrch.get_one_schema(
+                config, solution_schema, SOLUTION_SCHEMA
+            )
             solution_errors = json_schema_validate_as_string(solution_schema, data)
 
             if solution_errors:
                 raise InvalidData(
                     payload=dict(jsonschema_errors=solution_errors),
                     log_txt=f"Error while user {self.get_user()} tries to edit execution {idx}. "
-                        f"Solution data do not match the jsonschema.",
+                    f"Solution data do not match the jsonschema.",
                 )
-        execution = ExecutionModel.get_one_object(user=self.get_user(), idx=idx)
-        if execution is None:
-            err = "The execution does not exist."
-            raise ObjectDoesNotExist(
-                error=err,
-                log_txt=f"Error while user {self.get_user()} tries to edit execution {idx}."
-                + err,
-            )
-        state = req_data.get("state", EXEC_STATE_CORRECT)
+
+        state = kwargs.get("state", EXEC_STATE_CORRECT)
         new_data = dict(
             state=state,
             state_message=EXECUTION_STATE_MESSAGE_DICT[state],
@@ -141,10 +143,9 @@ class DAGDetailEndpoint(BaseMetaResource):
             new_data["data"] = data
         if checks is not None:
             new_data["checks"] = checks
-        req_data.update(new_data)
-        execution.update(req_data)
-        # TODO: is this save necessary?
-        execution.save()
+        kwargs.update(new_data)
+        execution.update(kwargs)
+
         current_app.logger.info(f"User {self.get_user()} edits execution {idx}")
         return {"message": "results successfully saved"}, 200
 
@@ -207,7 +208,6 @@ class DAGEndpointManual(BaseMetaResource):
 
         # Check data format
         data = kwargs.get("data")
-        # TODO: create a function to validate and replace data/ execution_results
         if data is None:
             # only check format if executions_results exist
             solution_schema = None
@@ -215,14 +215,16 @@ class DAGEndpointManual(BaseMetaResource):
             solution_schema = "solve_model_dag"
         if solution_schema is not None:
             config = current_app.config
-            solution_schema = DeployedOrch.get_one_schema(config, solution_schema, SOLUTION_SCHEMA)
+            solution_schema = DeployedOrch.get_one_schema(
+                config, solution_schema, SOLUTION_SCHEMA
+            )
             solution_errors = json_schema_validate_as_string(solution_schema, data)
 
             if solution_errors:
                 raise InvalidData(
                     payload=dict(jsonschema_errors=solution_errors),
                     log_txt=f"Error while user {self.get_user()} tries to manually create an execution. "
-                        f"Solution data do not match the jsonschema.",
+                    f"Solution data do not match the jsonschema.",
                 )
 
         kwargs_copy = dict(kwargs)
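The net effect of the `put` changes above: the endpoint now resolves the execution first (raising `ObjectDoesNotExist` before any validation work) and derives the solution schema from the stored execution instead of trusting a `solution_schema` field in the request body. A minimal client-side sketch of the new contract (the base URL, route, and token handling are assumptions for illustration, not shown in this diff):

# Hypothetical client call; route and auth details assumed, not taken from
# this diff. The payload no longer needs a "solution_schema" key: the server
# reads execution.schema, and "state" defaults to EXEC_STATE_CORRECT.
import requests

BASE_URL = "http://localhost:5000"  # assumed local cornflow server
TOKEN = "<service-token>"           # token linked to the airflow service user
execution_id = "<execution-id>"

response = requests.put(
    f"{BASE_URL}/dag/{execution_id}/",
    json={"data": {"objective": 42.0}},  # validated against execution.schema
    headers={"Authorization": f"Bearer {TOKEN}"},
)
assert response.status_code == 200  # {"message": "results successfully saved"}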
cornflow/endpoints/data_check.py CHANGED
@@ -3,6 +3,7 @@ External endpoints to launch the solution check on an execution
 """
 
 # Import from libraries
+# TODO: CHANGE BEFORE MERGING
 from cornflow_client.airflow.api import Airflow
 from cornflow_client.constants import INSTANCE_SCHEMA, SOLUTION_SCHEMA
 from flask import request, current_app
@@ -14,6 +15,9 @@ from cornflow.models import InstanceModel, ExecutionModel, CaseModel, DeployedOr
 from cornflow.schemas.execution import ExecutionDetailsEndpointResponse
 from cornflow.shared.authentication import Auth, authenticate
 from cornflow.shared.const import (
+    AIRFLOW_ERROR_MSG,
+    AIRFLOW_NOT_REACHABLE_MSG,
+    DAG_PAUSED_MSG,
     EXEC_STATE_QUEUED,
     EXEC_STATE_ERROR,
    EXEC_STATE_ERROR_START,
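The three message constants imported here are new in cornflow/shared/const.py (entry 30 in the file list, +9 -0). Their definitions are not shown in this diff; judging from the hard-coded strings they replace further down, they plausibly look like this (wording assumed):

# Sketch of the new constants in cornflow/shared/const.py; the exact wording
# is an assumption based on the literals removed elsewhere in this diff.
AIRFLOW_NOT_REACHABLE_MSG = "Airflow is not accessible"
AIRFLOW_ERROR_MSG = "Airflow responded with an error:"
DAG_PAUSED_MSG = "The dag exists but it is paused in airflow"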
@@ -32,6 +36,80 @@ from cornflow.shared.exceptions import (
 from cornflow.shared.validators import json_schema_validate_as_string
 
 
+def _run_airflow_data_check(
+    af_client: Airflow,
+    execution: ExecutionModel,
+    schema: str,
+    user,
+    context_str: str,
+    **run_dag_kwargs,
+):
+    """
+    Helper function to check Airflow status and run a data check DAG.
+
+    :param af_client: Initialized Airflow client.
+    :param execution: The ExecutionModel object to update.
+    :param schema: The name of the schema/DAG to run.
+    :param user: The user object performing the action.
+    :param context_str: A string describing the context (e.g., "execution {id}") for logging.
+    :param run_dag_kwargs: Additional keyword arguments for af_client.run_dag.
+    :return: None. Updates execution object in place and raises AirflowError on failure.
+    """
+    # Check Airflow liveness
+    if not af_client.is_alive():
+        current_app.logger.error(AIRFLOW_NOT_REACHABLE_MSG)
+        execution.update_state(EXEC_STATE_ERROR_START)
+        raise AirflowError(
+            error=AIRFLOW_NOT_REACHABLE_MSG,
+            payload=dict(
+                message=EXECUTION_STATE_MESSAGE_DICT[EXEC_STATE_ERROR_START],
+                state=EXEC_STATE_ERROR_START,
+            ),
+            log_txt=f"Error while user {user} tries to run data checks on {context_str}. "
+            + AIRFLOW_NOT_REACHABLE_MSG,
+        )
+
+    # Check if DAG is paused
+    schema_info = af_client.get_dag_info(schema)
+    info = schema_info.json()
+    if info.get("is_paused", False):
+        current_app.logger.error(DAG_PAUSED_MSG)
+        execution.update_state(EXEC_STATE_ERROR_START)
+        raise AirflowError(
+            error=DAG_PAUSED_MSG,
+            payload=dict(
+                message=EXECUTION_STATE_MESSAGE_DICT[EXEC_STATE_ERROR_START],
+                state=EXEC_STATE_ERROR_START,
+            ),
+            log_txt=f"Error while user {user} tries to run data checks on {context_str}. "
+            + DAG_PAUSED_MSG,
+        )
+
+    # Run the DAG
+    try:
+        response = af_client.run_workflow(
+            execution.id, orch_name=schema, checks_only=True, **run_dag_kwargs
+        )
+    except AirflowError as err:
+        error = f"{AIRFLOW_ERROR_MSG} {err}"
+        current_app.logger.error(error)
+        execution.update_state(EXEC_STATE_ERROR)
+        raise AirflowError(
+            error=error,
+            payload=dict(
+                message=EXECUTION_STATE_MESSAGE_DICT[EXEC_STATE_ERROR],
+                state=EXEC_STATE_ERROR,
+            ),
+            log_txt=f"Error while user {user} tries to run data checks on {context_str}. "
+            + error,
+        )
+
+    # Update execution on success
+    af_data = response.json()
+    execution.dag_run_id = af_data.get("dag_run_id")
+    execution.update_state(EXEC_STATE_QUEUED)
+
+
 class DataCheckExecutionEndpoint(BaseMetaResource):
     """
     Endpoint used to execute the instance and solution checks on an execution
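Because the three data-check endpoints now funnel through `_run_airflow_data_check`, the Airflow failure paths can be unit-tested once, in isolation. A minimal pytest-style sketch (the test itself is not part of this release; `current_app` is patched because the helper logs through Flask's application context):

# Sketch of a unit test for the new helper; names and patching strategy
# are assumptions, not part of this diff.
from unittest.mock import MagicMock, patch

import pytest

from cornflow.endpoints.data_check import _run_airflow_data_check
from cornflow.shared.const import EXEC_STATE_ERROR_START
from cornflow.shared.exceptions import AirflowError


def test_unreachable_airflow_marks_execution_as_failed_start():
    af_client = MagicMock()
    af_client.is_alive.return_value = False  # simulate Airflow being down
    execution = MagicMock()

    # Patch current_app so the helper can log without a real app context
    with patch("cornflow.endpoints.data_check.current_app"):
        with pytest.raises(AirflowError):
            _run_airflow_data_check(
                af_client, execution, "solve_model_dag", "some_user", "execution 1"
            )

    # The helper flags the execution before raising
    execution.update_state.assert_called_once_with(EXEC_STATE_ERROR_START)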
@@ -60,13 +138,15 @@ class DataCheckExecutionEndpoint(BaseMetaResource):
         :rtype: Tuple(dict, integer)
         """
         config = current_app.config
+        user = self.get_user()
+        context_str = f"execution {idx}"
 
         execution = ExecutionModel.get_one_object(user=self.get_user(), idx=idx)
         if execution is None:
             err = "The execution to check does not exist"
             raise ObjectDoesNotExist(
                 error=err,
-                log_txt=f"Error while user {self.get_user()} tries to run data checks on execution {idx}. "
+                log_txt=f"Error while user {user} tries to run data checks on {context_str}. "
                 + err,
             )
 
@@ -77,7 +157,7 @@ class DataCheckExecutionEndpoint(BaseMetaResource):
             err = "The execution is still running"
             raise InvalidUsage(
                 error=err,
-                log_txt=f"Error while user {self.get_user()} tries to run data checks on execution {idx}. "
+                log_txt=f"Error while user {user} tries to run data checks on {context_str}. "
                 + err,
             )
 
@@ -86,61 +166,12 @@ class DataCheckExecutionEndpoint(BaseMetaResource):
             execution.update_state(EXEC_STATE_NOT_RUN)
             return execution, 201
 
-        # We now try to launch the task in airflow
+        # Initialize Airflow client
         af_client = Airflow.from_config(config)
-        if not af_client.is_alive():
-            err = "Airflow is not accessible"
-            current_app.logger.error(err)
-            execution.update_state(EXEC_STATE_ERROR_START)
-            raise AirflowError(
-                error=err,
-                payload=dict(
-                    message=EXECUTION_STATE_MESSAGE_DICT[EXEC_STATE_ERROR_START],
-                    state=EXEC_STATE_ERROR_START,
-                ),
-                log_txt=f"Error while user {self.get_user()} tries to run data checks on execution {idx}. "
-                + err,
-            )
-        # ask airflow if dag_name exists
-        schema_info = af_client.get_orch_info(schema)
-
-        info = schema_info.json()
-        if info["is_paused"]:
-            err = "The dag exists but it is paused in airflow"
-            current_app.logger.error(err)
-            execution.update_state(EXEC_STATE_ERROR_START)
-            raise AirflowError(
-                error=err,
-                payload=dict(
-                    message=EXECUTION_STATE_MESSAGE_DICT[EXEC_STATE_ERROR_START],
-                    state=EXEC_STATE_ERROR_START,
-                ),
-                log_txt=f"Error while user {self.get_user()} tries to run data checks on execution {idx}. "
-                + err,
-            )
 
-        try:
-            response = af_client.run_workflow(
-                execution.id, orch_name=schema, checks_only=True
-            )
-        except AirflowError as err:
-            error = "Airflow responded with an error: {}".format(err)
-            current_app.logger.error(error)
-            execution.update_state(EXEC_STATE_ERROR)
-            raise AirflowError(
-                error=error,
-                payload=dict(
-                    message=EXECUTION_STATE_MESSAGE_DICT[EXEC_STATE_ERROR],
-                    state=EXEC_STATE_ERROR,
-                ),
-                log_txt=f"Error while user {self.get_user()} tries to run data checks on execution {idx}. "
-                + error,
-            )
+        # Call the shared Airflow execution logic
+        _run_airflow_data_check(af_client, execution, schema, user, context_str)
 
-        # if we succeed, we register the dag_run_id in the execution table:
-        af_data = response.json()
-        execution.run_id = af_data["dag_run_id"]
-        execution.update_state(EXEC_STATE_QUEUED)
         current_app.logger.info(
             "User {} launches checks of execution {}".format(
                 self.get_user_id(), execution.id
@@ -180,13 +211,15 @@ class DataCheckInstanceEndpoint(BaseMetaResource):
         :rtype: Tuple(dict, integer)
         """
         config = current_app.config
+        user = self.get_user()
+        context_str = f"instance {idx}"
 
         instance = InstanceModel.get_one_object(user=self.get_user(), idx=idx)
         if instance is None:
             err = "The instance to check does not exist"
             raise ObjectDoesNotExist(
                 error=err,
-                log_txt=f"Error while user {self.get_user()} tries to run data checks on instance {idx}. "
+                log_txt=f"Error while user {user} tries to run data checks on {context_str}. "
                 + err,
             )
         payload = dict(
@@ -197,68 +230,19 @@ class DataCheckInstanceEndpoint(BaseMetaResource):
         )
         schema = instance.schema
 
-        execution, status_code = self.post_list(data=payload)
+        execution, _ = self.post_list(data=payload)
 
         # this allows testing without airflow interaction:
         if request.args.get("run", "1") == "0":
             execution.update_state(EXEC_STATE_NOT_RUN)
             return execution, 201
 
-        # We now try to launch the task in airflow
+        # Initialize Airflow client
         af_client = Airflow.from_config(config)
-        if not af_client.is_alive():
-            err = "Airflow is not accessible"
-            current_app.logger.error(err)
-            execution.update_state(EXEC_STATE_ERROR_START)
-            raise AirflowError(
-                error=err,
-                payload=dict(
-                    message=EXECUTION_STATE_MESSAGE_DICT[EXEC_STATE_ERROR_START],
-                    state=EXEC_STATE_ERROR_START,
-                ),
-                log_txt=f"Error while user {self.get_user()} tries to run data checks on instance {idx}. "
-                + err,
-            )
-        # ask airflow if dag_name exists
-        schema_info = af_client.get_orch_info(schema)
-
-        info = schema_info.json()
-        if info["is_paused"]:
-            err = "The dag exists but it is paused in airflow"
-            current_app.logger.error(err)
-            execution.update_state(EXEC_STATE_ERROR_START)
-            raise AirflowError(
-                error=err,
-                payload=dict(
-                    message=EXECUTION_STATE_MESSAGE_DICT[EXEC_STATE_ERROR_START],
-                    state=EXEC_STATE_ERROR_START,
-                ),
-                log_txt=f"Error while user {self.get_user()} tries to run data checks on instance {idx}. "
-                + err,
-            )
 
-        try:
-            response = af_client.run_workflow(
-                execution.id, orch_name=schema, checks_only=True
-            )
-        except AirflowError as err:
-            error = "Airflow responded with an error: {}".format(err)
-            current_app.logger.error(error)
-            execution.update_state(EXEC_STATE_ERROR)
-            raise AirflowError(
-                error=error,
-                payload=dict(
-                    message=EXECUTION_STATE_MESSAGE_DICT[EXEC_STATE_ERROR],
-                    state=EXEC_STATE_ERROR,
-                ),
-                log_txt=f"Error while user {self.get_user()} tries to run data checks on instance {idx}. "
-                + error,
-            )
+        # Call the shared Airflow execution logic
+        _run_airflow_data_check(af_client, execution, schema, user, context_str)
 
-        # if we succeed, we register the dag_run_id in the execution table:
-        af_data = response.json()
-        execution.run_id = af_data["dag_run_id"]
-        execution.update_state(EXEC_STATE_QUEUED)
         current_app.logger.info(
             "User {} creates instance check execution {}".format(
                 self.get_user_id(), execution.id
@@ -298,13 +282,15 @@ class DataCheckCaseEndpoint(BaseMetaResource):
         :rtype: Tuple(dict, integer)
         """
         config = current_app.config
+        user = self.get_user()
+        context_str = f"case {idx}"
 
         case = CaseModel.get_one_object(user=self.get_user(), idx=idx)
         if case is None:
             err = "The case to check does not exist"
             raise ObjectDoesNotExist(
                 error=err,
-                log_txt=f"Error while user {self.get_user()} tries to run data checks on case {idx}. "
+                log_txt=f"Error while user {user} tries to run data checks on {context_str}. "
                 + err,
             )
 
@@ -335,7 +321,7 @@ class DataCheckCaseEndpoint(BaseMetaResource):
         if validation_errors:
             raise InvalidData(
                 payload=dict(jsonschema_errors=validation_errors),
-                log_txt=f"Error while user {self.get_user()} tries to run data checks on case {idx}. "
+                log_txt=f"Error while user {user} tries to run data checks on {context_str}. "
                 f"Instance data does not match the jsonschema.",
             )
 
@@ -364,7 +350,7 @@ class DataCheckCaseEndpoint(BaseMetaResource):
         if validation_errors:
             raise InvalidData(
                 payload=dict(jsonschema_errors=validation_errors),
-                log_txt=f"Error while user {self.get_user()} tries to run data checks on case {idx}. "
+                log_txt=f"Error while user {user} tries to run data checks on {context_str}. "
                 f"Solution data does not match the jsonschema.",
             )
 
@@ -378,62 +364,14 @@ class DataCheckCaseEndpoint(BaseMetaResource):
             execution.update_state(EXEC_STATE_NOT_RUN)
             return execution, 201
 
-        # We now try to launch the task in airflow
+        # Initialize Airflow client
         af_client = Airflow.from_config(config)
-        if not af_client.is_alive():
-            err = "Airflow is not accessible"
-            current_app.logger.error(err)
-            execution.update_state(EXEC_STATE_ERROR_START)
-            raise AirflowError(
-                error=err,
-                payload=dict(
-                    message=EXECUTION_STATE_MESSAGE_DICT[EXEC_STATE_ERROR_START],
-                    state=EXEC_STATE_ERROR_START,
-                ),
-                log_txt=f"Error while user {self.get_user()} tries to run data checks on case {idx}. "
-                + err,
-            )
-        # ask airflow if dag_name exists
-        schema_info = af_client.get_orch_info(schema)
-
-        info = schema_info.json()
-        if info["is_paused"]:
-            err = "The dag exists but it is paused in airflow"
-            current_app.logger.error(err)
-            execution.update_state(EXEC_STATE_ERROR_START)
-            raise AirflowError(
-                error=err,
-                payload=dict(
-                    message=EXECUTION_STATE_MESSAGE_DICT[EXEC_STATE_ERROR_START],
-                    state=EXEC_STATE_ERROR_START,
-                ),
-                log_txt=f"Error while user {self.get_user()} tries to run data checks on case {idx}. "
-                + err,
-            )
 
-        try:
-            response = af_client.run_workflow(
-                execution.id, orch_name=schema, checks_only=True, case_id=idx
-            )
-
-        except AirflowError as err:
-            error = "Airflow responded with an error: {}".format(err)
-            current_app.logger.error(error)
-            execution.update_state(EXEC_STATE_ERROR)
-            raise AirflowError(
-                error=error,
-                payload=dict(
-                    message=EXECUTION_STATE_MESSAGE_DICT[EXEC_STATE_ERROR],
-                    state=EXEC_STATE_ERROR,
-                ),
-                log_txt=f"Error while user {self.get_user()} tries to run data checks on case {idx}. "
-                + error,
-            )
+        # Call the shared Airflow execution logic, passing case_id
+        _run_airflow_data_check(
+            af_client, execution, schema, user, context_str, case_id=idx
+        )
 
-        # if we succeed, we register the dag_run_id in the execution table:
-        af_data = response.json()
-        execution.run_id = af_data["dag_run_id"]
-        execution.update_state(EXEC_STATE_QUEUED)
         current_app.logger.info(
             "User {} creates case check execution {}".format(
                 self.get_user_id(), execution.id
cornflow/endpoints/example_data.py CHANGED
@@ -1,6 +1,7 @@
 """
 Endpoints to get the example data from a DAG
 """
+
 import json
 
 from cornflow_client.airflow.api import Airflow
@@ -11,7 +12,12 @@ from cornflow.endpoints.meta_resource import BaseMetaResource
 from cornflow.models import PermissionsDAG
 from cornflow.schemas.example_data import ExampleListData, ExampleDetailData
 from cornflow.shared.authentication import Auth, authenticate
-from cornflow.shared.const import VIEWER_ROLE, PLANNER_ROLE, ADMIN_ROLE
+from cornflow.shared.const import (
+    AIRFLOW_NOT_REACHABLE_MSG,
+    VIEWER_ROLE,
+    PLANNER_ROLE,
+    ADMIN_ROLE,
+)
 from cornflow.shared.exceptions import AirflowError, NoPermission, ObjectDoesNotExist
 
 
@@ -44,7 +50,7 @@ class ExampleDataListEndpoint(BaseMetaResource):
             current_app.logger.error(
                 "Airflow not accessible when getting data {}".format(dag_name)
             )
-            raise AirflowError(error="Airflow is not accessible")
+            raise AirflowError(error=f"{AIRFLOW_NOT_REACHABLE_MSG}")
 
         # try airflow and see if dag_name exists
         af_client.get_orch_info(dag_name)
@@ -87,7 +93,7 @@ class ExampleDataDetailEndpoint(BaseMetaResource):
             current_app.logger.error(
                 "Airflow not accessible when getting data {}".format(dag_name)
            )
-            raise AirflowError(error="Airflow is not accessible")
+            raise AirflowError(error=f"{AIRFLOW_NOT_REACHABLE_MSG}")
 
         # try airflow and see if dag_name exists
         af_client.get_orch_info(dag_name)