cornflow 2.0.0a10__py3-none-any.whl → 2.0.0a12__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. airflow_config/airflow_local_settings.py +1 -1
  2. cornflow/app.py +8 -3
  3. cornflow/cli/migrations.py +23 -3
  4. cornflow/cli/service.py +18 -18
  5. cornflow/cli/utils.py +16 -1
  6. cornflow/commands/dag.py +1 -1
  7. cornflow/config.py +13 -8
  8. cornflow/endpoints/__init__.py +8 -2
  9. cornflow/endpoints/alarms.py +66 -2
  10. cornflow/endpoints/data_check.py +53 -26
  11. cornflow/endpoints/execution.py +387 -132
  12. cornflow/endpoints/login.py +81 -63
  13. cornflow/endpoints/meta_resource.py +11 -3
  14. cornflow/migrations/versions/999b98e24225.py +34 -0
  15. cornflow/models/base_data_model.py +4 -32
  16. cornflow/models/execution.py +2 -3
  17. cornflow/models/meta_models.py +28 -22
  18. cornflow/models/user.py +7 -10
  19. cornflow/schemas/alarms.py +8 -0
  20. cornflow/schemas/execution.py +1 -1
  21. cornflow/schemas/query.py +2 -1
  22. cornflow/schemas/user.py +5 -20
  23. cornflow/shared/authentication/auth.py +201 -264
  24. cornflow/shared/const.py +3 -14
  25. cornflow/shared/databricks.py +5 -1
  26. cornflow/tests/const.py +1 -0
  27. cornflow/tests/custom_test_case.py +77 -26
  28. cornflow/tests/unit/test_actions.py +2 -2
  29. cornflow/tests/unit/test_alarms.py +55 -1
  30. cornflow/tests/unit/test_apiview.py +108 -3
  31. cornflow/tests/unit/test_cases.py +20 -29
  32. cornflow/tests/unit/test_cli.py +6 -5
  33. cornflow/tests/unit/test_commands.py +3 -3
  34. cornflow/tests/unit/test_dags.py +5 -6
  35. cornflow/tests/unit/test_executions.py +443 -123
  36. cornflow/tests/unit/test_instances.py +14 -2
  37. cornflow/tests/unit/test_instances_file.py +1 -1
  38. cornflow/tests/unit/test_licenses.py +1 -1
  39. cornflow/tests/unit/test_log_in.py +230 -207
  40. cornflow/tests/unit/test_permissions.py +8 -8
  41. cornflow/tests/unit/test_roles.py +48 -10
  42. cornflow/tests/unit/test_schemas.py +1 -1
  43. cornflow/tests/unit/test_tables.py +7 -7
  44. cornflow/tests/unit/test_token.py +19 -5
  45. cornflow/tests/unit/test_users.py +22 -6
  46. cornflow/tests/unit/tools.py +75 -10
  47. {cornflow-2.0.0a10.dist-info → cornflow-2.0.0a12.dist-info}/METADATA +16 -15
  48. {cornflow-2.0.0a10.dist-info → cornflow-2.0.0a12.dist-info}/RECORD +51 -51
  49. {cornflow-2.0.0a10.dist-info → cornflow-2.0.0a12.dist-info}/WHEEL +1 -1
  50. cornflow/endpoints/execution_databricks.py +0 -808
  51. {cornflow-2.0.0a10.dist-info → cornflow-2.0.0a12.dist-info}/entry_points.txt +0 -0
  52. {cornflow-2.0.0a10.dist-info → cornflow-2.0.0a12.dist-info}/top_level.txt +0 -0
@@ -19,5 +19,5 @@ STATE_COLORS = {
19
19
  from airflow.www.utils import UIAlert
20
20
 
21
21
  DASHBOARD_UIALERTS = [
22
- UIAlert("Welcome! This is the backend of your Cornflow environment. Airflow™ is a platform created by the community to programmatically author, schedule and monitor workflows."),
22
+ UIAlert("Welcome! This is the backend of your cornflow environment. Airflow™ is a platform created by the community to programmatically author, schedule and monitor workflows."),
23
23
  ]
cornflow/app.py CHANGED
@@ -37,7 +37,7 @@ from cornflow.endpoints.signup import SignUpEndpoint
37
37
  from cornflow.shared import db, bcrypt
38
38
  from cornflow.shared.compress import init_compress
39
39
  from cornflow.shared.const import AUTH_DB, AUTH_LDAP, AUTH_OID
40
- from cornflow.shared.exceptions import initialize_errorhandlers
40
+ from cornflow.shared.exceptions import initialize_errorhandlers, ConfigurationError
41
41
  from cornflow.shared.log_config import log_config
42
42
 
43
43
 
@@ -62,11 +62,11 @@ def create_app(env_name="development", dataconn=None):
62
62
  CORS(app)
63
63
  bcrypt.init_app(app)
64
64
  db.init_app(app)
65
- migrate = Migrate(app=app, db=db)
65
+ Migrate(app=app, db=db)
66
66
 
67
67
  if "sqlite" in app.config["SQLALCHEMY_DATABASE_URI"]:
68
68
 
69
- def _fk_pragma_on_connect(dbapi_con, con_record):
69
+ def _fk_pragma_on_connect(dbapi_con, _con_record):
70
70
  dbapi_con.execute("pragma foreign_keys=ON")
71
71
 
72
72
  with app.app_context():
@@ -100,6 +100,11 @@ def create_app(env_name="development", dataconn=None):
100
100
  api.add_resource(LoginEndpoint, "/login/", endpoint="login")
101
101
  elif auth_type == AUTH_OID:
102
102
  api.add_resource(LoginOpenAuthEndpoint, "/login/", endpoint="login")
103
+ else:
104
+ raise ConfigurationError(
105
+ error="Invalid authentication type",
106
+ log_txt="Error while configuring authentication. The authentication type is not valid."
107
+ )
103
108
 
104
109
  initialize_errorhandlers(app)
105
110
  init_compress(app)
@@ -3,7 +3,7 @@ import os.path
3
3
 
4
4
  import click
5
5
  from cornflow.shared import db
6
- from flask_migrate import Migrate, migrate, upgrade, init
6
+ from flask_migrate import Migrate, migrate, upgrade, downgrade, init
7
7
 
8
8
  from .utils import get_app
9
9
 
@@ -28,7 +28,27 @@ def migrate_migrations():
28
28
 
29
29
 
30
30
  @migrations.command(name="upgrade", help="Apply migrations")
31
- def upgrade_migrations():
31
+ @click.option(
32
+ "-r", "--revision", type=str, help="The revision to upgrade to", default="head"
33
+ )
34
+ def upgrade_migrations(revision="head"):
35
+ app = get_app()
36
+ external = int(os.getenv("EXTERNAL_APP", 0))
37
+ if external == 0:
38
+ path = "./cornflow/migrations"
39
+ else:
40
+ path = f"./{os.getenv('EXTERNAL_APP_MODULE', 'external_app')}/migrations"
41
+
42
+ with app.app_context():
43
+ migration_client = Migrate(app=app, db=db, directory=path)
44
+ upgrade(revision=revision)
45
+
46
+
47
+ @migrations.command(name="downgrade", help="Downgrade migrations")
48
+ @click.option(
49
+ "-r", "--revision", type=str, help="The revision to downgrade to", default="-1"
50
+ )
51
+ def downgrade_migrations(revision="-1"):
32
52
  app = get_app()
33
53
  external = int(os.getenv("EXTERNAL_APP", 0))
34
54
  if external == 0:
@@ -38,7 +58,7 @@ def upgrade_migrations():
38
58
 
39
59
  with app.app_context():
40
60
  migration_client = Migrate(app=app, db=db, directory=path)
41
- upgrade()
61
+ downgrade(revision=revision)
42
62
 
43
63
 
44
64
  @migrations.command(
cornflow/cli/service.py CHANGED
@@ -6,6 +6,7 @@ from logging import error
6
6
 
7
7
 
8
8
  import click
9
+ from .utils import get_db_conn
9
10
  import cornflow
10
11
  from cornflow.app import create_app
11
12
  from cornflow.commands import (
@@ -18,9 +19,12 @@ from cornflow.commands import (
18
19
  )
19
20
  from cornflow.shared.const import (
20
21
  AUTH_DB,
22
+ AUTH_LDAP,
23
+ AUTH_OID,
21
24
  ADMIN_ROLE,
22
- DATABRICKS_BACKEND,
23
25
  SERVICE_ROLE,
26
+ PLANNER_ROLE,
27
+ DATABRICKS_BACKEND,
24
28
  AIRFLOW_BACKEND,
25
29
  )
26
30
  from cornflow.shared import db
@@ -44,7 +48,7 @@ def init_cornflow_service():
44
48
  # Global defaults and back-compat #
45
49
  ###################################
46
50
  # cornflow backend selection
47
- cornflow_backend = os.getenv("CORNFLOW_BACKEND", AIRFLOW_BACKEND)
51
+ cornflow_backend = os.getenv("CORNFLOW_BACKEND", str(AIRFLOW_BACKEND))
48
52
  os.environ["CORNFLOW_BACKEND"] = cornflow_backend
49
53
  cornflow_backend = int(cornflow_backend)
50
54
  # Airflow global default conn
@@ -73,15 +77,8 @@ def init_cornflow_service():
73
77
  os.environ["SECRET_KEY"] = os.getenv("FERNET_KEY", Fernet.generate_key().decode())
74
78
 
75
79
  # Cornflow db defaults
76
- cornflow_db_host = os.getenv("CORNFLOW_DB_HOST", "cornflow_db")
77
- cornflow_db_port = os.getenv("CORNFLOW_DB_PORT", "5432")
78
- cornflow_db_user = os.getenv("CORNFLOW_DB_USER", "cornflow")
79
- cornflow_db_password = os.getenv("CORNFLOW_DB_PASSWORD", "cornflow")
80
- cornflow_db = os.getenv("CORNFLOW_DB", "cornflow")
81
- cornflow_db_conn = os.getenv(
82
- "cornflow_db_conn",
83
- f"postgresql://{cornflow_db_user}:{cornflow_db_password}@{cornflow_db_host}:{cornflow_db_port}/{cornflow_db}",
84
- )
80
+ os.environ["DEFAULT_POSTGRES"] = "1"
81
+ cornflow_db_conn = get_db_conn()
85
82
  os.environ["DATABASE_URL"] = cornflow_db_conn
86
83
 
87
84
  # Platform auth config and service users
@@ -107,12 +104,12 @@ def init_cornflow_service():
107
104
  os.environ["SIGNUP_ACTIVATED"] = str(signup_activated)
108
105
  user_access_all_objects = os.getenv("USER_ACCESS_ALL_OBJECTS", 0)
109
106
  os.environ["USER_ACCESS_ALL_OBJECTS"] = str(user_access_all_objects)
110
- default_role = os.getenv("DEFAULT_ROLE", 2)
107
+ default_role = int(os.getenv("DEFAULT_ROLE", PLANNER_ROLE))
111
108
  os.environ["DEFAULT_ROLE"] = str(default_role)
112
109
 
113
110
  # Check LDAP parameters for active directory and show message
114
- if os.getenv("AUTH_TYPE") == 2:
115
- click.echo(
111
+ if os.getenv("AUTH_TYPE") == AUTH_LDAP:
112
+ print(
116
113
  "WARNING: Cornflow will be deployed with LDAP Authorization. Please review your ldap auth configuration."
117
114
  )
118
115
 
@@ -153,10 +150,11 @@ def init_cornflow_service():
153
150
  app = create_app(environment, cornflow_db_conn)
154
151
  with app.app_context():
155
152
  path = f"{os.path.dirname(cornflow.__file__)}/migrations"
156
- migrate = Migrate(app=app, db=db, directory=path)
153
+ Migrate(app=app, db=db, directory=path)
157
154
  upgrade()
158
155
  access_init_command(verbose=False)
159
- if auth == 1 or auth == 0:
156
+ if auth == AUTH_DB or auth == AUTH_OID:
157
+ # create cornflow admin user
160
158
  create_user_with_role(
161
159
  cornflow_admin_user,
162
160
  cornflow_admin_email,
@@ -218,7 +216,7 @@ def init_cornflow_service():
218
216
  migrate = Migrate(app=app, db=db, directory=path)
219
217
  upgrade()
220
218
  access_init_command(verbose=False)
221
- if auth == 1 or auth == 0:
219
+ if auth == AUTH_DB or auth == AUTH_OID:
222
220
  # create cornflow admin user
223
221
  create_user_with_role(
224
222
  cornflow_admin_user,
@@ -251,8 +249,10 @@ def init_cornflow_service():
251
249
  update_dag_registry_command(
252
250
  airflow_url, airflow_user, airflow_pwd, verbose=True
253
251
  )
254
- else:
252
+ elif cornflow_backend == DATABRICKS_BACKEND:
255
253
  register_dag_permissions_command(open_deployment, verbose=True)
254
+ else:
255
+ raise Exception("Selected backend not among valid options")
256
256
 
257
257
  os.system(
258
258
  f"/usr/local/bin/gunicorn -c python:cornflow.gunicorn "
cornflow/cli/utils.py CHANGED
@@ -6,7 +6,7 @@ import warnings
6
6
 
7
7
  def get_app():
8
8
  env = os.getenv("FLASK_ENV", "development")
9
- data_conn = os.getenv("DATABASE_URL", "sqlite:///cornflow.db")
9
+ data_conn = get_db_conn()
10
10
  if env == "production":
11
11
  warnings.filterwarnings("ignore")
12
12
  external = int(os.getenv("EXTERNAL_APP", 0))
@@ -24,3 +24,18 @@ def get_app():
24
24
  app = create_app(env, data_conn)
25
25
 
26
26
  return app
27
+
28
+
29
+ def get_db_conn():
30
+ if int(os.getenv("DEFAULT_POSTGRES", 0)) == 0:
31
+ return os.getenv("DATABASE_URL", "sqlite:///cornflow.db")
32
+ else:
33
+ cornflow_db_host = os.getenv("CORNFLOW_DB_HOST", "cornflow_db")
34
+ cornflow_db_port = os.getenv("CORNFLOW_DB_PORT", "5432")
35
+ cornflow_db_user = os.getenv("CORNFLOW_DB_USER", "cornflow")
36
+ cornflow_db_password = os.getenv("CORNFLOW_DB_PASSWORD", "cornflow")
37
+ cornflow_db = os.getenv("CORNFLOW_DB", "cornflow")
38
+ return os.getenv(
39
+ "cornflow_db_conn",
40
+ f"postgresql://{cornflow_db_user}:{cornflow_db_password}@{cornflow_db_host}:{cornflow_db_port}/{cornflow_db}",
41
+ )
cornflow/commands/dag.py CHANGED
@@ -80,7 +80,7 @@ def register_deployed_dags_command_test(dags: list = None, verbose: bool = False
80
80
  from cornflow_client import get_pulp_jsonschema, get_empty_schema
81
81
 
82
82
  if dags is None:
83
- dags = ["solve_model_dag", "gc", "timer"]
83
+ dags = ["solve_model_dag", "gc", "timer", "979073949072767" ]
84
84
 
85
85
  deployed_dag = [
86
86
  DeployedOrch(
cornflow/config.py CHANGED
@@ -1,7 +1,8 @@
1
1
  import os
2
- from .shared.const import AUTH_DB, PLANNER_ROLE, AIRFLOW_BACKEND
2
+ from .shared.const import AUTH_DB, PLANNER_ROLE, AUTH_OID
3
3
  from apispec import APISpec
4
4
  from apispec.ext.marshmallow import MarshmallowPlugin
5
+ from cornflow.shared.const import AIRFLOW_BACKEND, DATABRICKS_BACKEND
5
6
 
6
7
 
7
8
  class DefaultConfig(object):
@@ -72,15 +73,13 @@ class DefaultConfig(object):
72
73
  LDAP_PROTOCOL_VERSION = int(os.getenv("LDAP_PROTOCOL_VERSION", 3))
73
74
  LDAP_USE_TLS = os.getenv("LDAP_USE_TLS", "False")
74
75
 
75
- # OpenID login -> Default Azure
76
- OID_PROVIDER = os.getenv("OID_PROVIDER", 0)
77
- OID_CLIENT_ID = os.getenv("OID_CLIENT_ID")
78
- OID_TENANT_ID = os.getenv("OID_TENANT_ID")
79
- OID_ISSUER = os.getenv("OID_ISSUER")
76
+ # OpenID Connect configuration
77
+ OID_PROVIDER = os.getenv("OID_PROVIDER")
78
+ OID_EXPECTED_AUDIENCE = os.getenv("OID_EXPECTED_AUDIENCE")
80
79
 
81
80
  # APISPEC:
82
81
  APISPEC_SPEC = APISpec(
83
- title="Cornflow API docs",
82
+ title="cornflow API docs",
84
83
  version="v1",
85
84
  plugins=[MarshmallowPlugin()],
86
85
  openapi_version="2.0.0",
@@ -135,13 +134,18 @@ class Testing(DefaultConfig):
135
134
  OPEN_DEPLOYMENT = 1
136
135
  LOG_LEVEL = int(os.getenv("LOG_LEVEL", 10))
137
136
 
137
+ class TestingDatabricks(Testing):
138
+ CORNFLOW_BACKEND = DATABRICKS_BACKEND
139
+
138
140
 
139
141
  class TestingOpenAuth(Testing):
140
142
  """
141
143
  Configuration class for testing some edge cases with Open Auth login
142
144
  """
143
145
 
144
- AUTH_TYPE = 0
146
+ AUTH_TYPE = AUTH_OID
147
+ OID_PROVIDER = "https://test-provider.example.com"
148
+ OID_EXPECTED_AUDIENCE = "test-audience-id"
145
149
 
146
150
 
147
151
  class TestingApplicationRoot(Testing):
@@ -172,4 +176,5 @@ app_config = {
172
176
  "production": Production,
173
177
  "testing-oauth": TestingOpenAuth,
174
178
  "testing-root": TestingApplicationRoot,
179
+ "testing-databricks" : TestingDatabricks
175
180
  }
@@ -3,8 +3,9 @@ Initialization file for the endpoints module
3
3
  All references to endpoints should be imported from here
4
4
  The login resource gets created on app startup as it depends on configuration
5
5
  """
6
+
6
7
  from .action import ActionListEndpoint
7
- from .alarms import AlarmsEndpoint
8
+ from .alarms import AlarmsEndpoint, AlarmDetailEndpoint
8
9
  from .apiview import ApiViewListEndpoint
9
10
  from .case import (
10
11
  CaseEndpoint,
@@ -29,7 +30,7 @@ from .data_check import (
29
30
  DataCheckCaseEndpoint,
30
31
  )
31
32
  from .example_data import ExampleDataListEndpoint, ExampleDataDetailEndpoint
32
- from .execution_databricks import (
33
+ from .execution import (
33
34
  ExecutionEndpoint,
34
35
  ExecutionDetailsEndpoint,
35
36
  ExecutionStatusEndpoint,
@@ -225,6 +226,11 @@ alarms_resources = [
225
226
  urls="/alarms/",
226
227
  endpoint="alarms",
227
228
  ),
229
+ dict(
230
+ resource=AlarmDetailEndpoint,
231
+ urls="/alarms/<int:idx>/",
232
+ endpoint="alarms-detail",
233
+ ),
228
234
  dict(
229
235
  resource=MainAlarmsEndpoint,
230
236
  urls="/main-alarms/",
@@ -1,15 +1,19 @@
1
1
  # Imports from libraries
2
+ from flask import current_app
2
3
  from flask_apispec import doc, marshal_with, use_kwargs
3
4
 
4
5
  # Import from internal modules
5
6
  from cornflow.endpoints.meta_resource import BaseMetaResource
6
7
  from cornflow.models import AlarmsModel
7
8
  from cornflow.schemas.alarms import (
9
+ AlarmEditRequest,
8
10
  AlarmsResponse,
9
11
  AlarmsPostRequest,
10
- QueryFiltersAlarms
12
+ QueryFiltersAlarms,
11
13
  )
12
14
  from cornflow.shared.authentication import Auth, authenticate
15
+ from cornflow.shared.exceptions import AirflowError, ObjectDoesNotExist, InvalidData
16
+ from cornflow.shared.const import SERVICE_ROLE
13
17
 
14
18
 
15
19
  class AlarmsEndpoint(BaseMetaResource):
@@ -56,4 +60,64 @@ class AlarmsEndpoint(BaseMetaResource):
56
60
  and an integer with the HTTP status code.
57
61
  :rtype: Tuple(dict, integer)
58
62
  """
59
- return self.post_list(data=kwargs)
63
+ return self.post_list(data=kwargs)
64
+
65
+
66
+ class AlarmDetailEndpointBase(BaseMetaResource):
67
+ """
68
+ Endpoint used to get the information of a certain alarm. But not the data!
69
+ """
70
+
71
+ def __init__(self):
72
+ super().__init__()
73
+ self.data_model = AlarmsModel
74
+ self.unique = ["id"]
75
+
76
+
77
+ class AlarmDetailEndpoint(AlarmDetailEndpointBase):
78
+ @doc(description="Get details of an alarm", tags=["None"], inherit=False)
79
+ @authenticate(auth_class=Auth())
80
+ @marshal_with(AlarmsResponse)
81
+ @BaseMetaResource.get_data_or_404
82
+ def get(self, idx):
83
+ """
84
+ API method to get an execution created by the user and its related info.
85
+ It requires authentication to be passed in the form of a token that has to be linked to
86
+ an existing session (login) made by a user.
87
+
88
+ :param str idx: ID of the execution.
89
+ :return: A dictionary with a message (error if authentication failed, or the execution does not exist or
90
+ the data of the execution) and an integer with the HTTP status code.
91
+ :rtype: Tuple(dict, integer)
92
+ """
93
+ current_app.logger.info(
94
+ f"User {self.get_user()} gets details of execution {idx}"
95
+ )
96
+ return self.get_detail(idx=idx)
97
+
98
+ @doc(description="Edit an execution", tags=["Executions"], inherit=False)
99
+ @authenticate(auth_class=Auth())
100
+ @use_kwargs(AlarmEditRequest, location="json")
101
+ def put(self, idx, **data):
102
+ """
103
+ Edit an existing alarm
104
+
105
+ :param string idx: ID of the alarm.
106
+ :return: A dictionary with a message (error if authentication failed, or the alarm does not exist or
107
+ a message) and an integer with the HTTP status code.
108
+ :rtype: Tuple(dict, integer)
109
+ """
110
+ current_app.logger.info(f"User {self.get_user()} edits alarm {idx}")
111
+ return self.put_detail(data, track_user=False, idx=idx)
112
+
113
+ @doc(description="Disable an alarm", tags=["None"])
114
+ @authenticate(auth_class=Auth())
115
+ def delete(self, idx):
116
+ """
117
+ :param int alarm_id: Alarm id.
118
+ :return:
119
+ :rtype: Tuple(dict, integer)
120
+ """
121
+
122
+ current_app.logger.info(f"Alarm {idx} was disabled by user {self.get_user()}")
123
+ return self.disable_detail(idx=idx)
@@ -18,13 +18,16 @@ from cornflow.shared.const import (
18
18
  EXEC_STATE_ERROR,
19
19
  EXEC_STATE_ERROR_START,
20
20
  EXEC_STATE_NOT_RUN,
21
- EXECUTION_STATE_MESSAGE_DICT, VIEWER_ROLE, PLANNER_ROLE, ADMIN_ROLE,
21
+ EXECUTION_STATE_MESSAGE_DICT,
22
+ VIEWER_ROLE,
23
+ PLANNER_ROLE,
24
+ ADMIN_ROLE,
22
25
  )
23
26
  from cornflow.shared.exceptions import (
24
27
  AirflowError,
25
28
  ObjectDoesNotExist,
26
29
  InvalidUsage,
27
- InvalidData
30
+ InvalidData,
28
31
  )
29
32
  from cornflow.shared.validators import json_schema_validate_as_string
30
33
 
@@ -33,6 +36,7 @@ class DataCheckExecutionEndpoint(BaseMetaResource):
33
36
  """
34
37
  Endpoint used to execute the instance and solution checks on an execution
35
38
  """
39
+
36
40
  ROLES_WITH_ACCESS = [PLANNER_ROLE, ADMIN_ROLE]
37
41
 
38
42
  def __init__(self):
@@ -62,7 +66,8 @@ class DataCheckExecutionEndpoint(BaseMetaResource):
62
66
  err = "The execution to check does not exist"
63
67
  raise ObjectDoesNotExist(
64
68
  error=err,
65
- log_txt=f"Error while user {self.get_user()} tries to run data checks on execution {idx}. " + err
69
+ log_txt=f"Error while user {self.get_user()} tries to run data checks on execution {idx}. "
70
+ + err,
66
71
  )
67
72
 
68
73
  schema = execution.schema
@@ -72,7 +77,8 @@ class DataCheckExecutionEndpoint(BaseMetaResource):
72
77
  err = "The execution is still running"
73
78
  raise InvalidUsage(
74
79
  error=err,
75
- log_txt=f"Error while user {self.get_user()} tries to run data checks on execution {idx}. " + err
80
+ log_txt=f"Error while user {self.get_user()} tries to run data checks on execution {idx}. "
81
+ + err,
76
82
  )
77
83
 
78
84
  # this allows testing without airflow interaction:
@@ -92,7 +98,8 @@ class DataCheckExecutionEndpoint(BaseMetaResource):
92
98
  message=EXECUTION_STATE_MESSAGE_DICT[EXEC_STATE_ERROR_START],
93
99
  state=EXEC_STATE_ERROR_START,
94
100
  ),
95
- log_txt=f"Error while user {self.get_user()} tries to run data checks on execution {idx}. " + err
101
+ log_txt=f"Error while user {self.get_user()} tries to run data checks on execution {idx}. "
102
+ + err,
96
103
  )
97
104
  # ask airflow if dag_name exists
98
105
  schema_info = af_client.get_orch_info(schema)
@@ -108,7 +115,8 @@ class DataCheckExecutionEndpoint(BaseMetaResource):
108
115
  message=EXECUTION_STATE_MESSAGE_DICT[EXEC_STATE_ERROR_START],
109
116
  state=EXEC_STATE_ERROR_START,
110
117
  ),
111
- log_txt=f"Error while user {self.get_user()} tries to run data checks on execution {idx}. " + err
118
+ log_txt=f"Error while user {self.get_user()} tries to run data checks on execution {idx}. "
119
+ + err,
112
120
  )
113
121
 
114
122
  try:
@@ -125,12 +133,13 @@ class DataCheckExecutionEndpoint(BaseMetaResource):
125
133
  message=EXECUTION_STATE_MESSAGE_DICT[EXEC_STATE_ERROR],
126
134
  state=EXEC_STATE_ERROR,
127
135
  ),
128
- log_txt=f"Error while user {self.get_user()} tries to run data checks on execution {idx}. " + error
136
+ log_txt=f"Error while user {self.get_user()} tries to run data checks on execution {idx}. "
137
+ + error,
129
138
  )
130
139
 
131
140
  # if we succeed, we register the dag_run_id in the execution table:
132
141
  af_data = response.json()
133
- execution.dag_run_id = af_data["dag_run_id"]
142
+ execution.run_id = af_data["dag_run_id"]
134
143
  execution.update_state(EXEC_STATE_QUEUED)
135
144
  current_app.logger.info(
136
145
  "User {} launches checks of execution {}".format(
@@ -144,7 +153,9 @@ class DataCheckInstanceEndpoint(BaseMetaResource):
144
153
  """
145
154
  Endpoint used to execute the instance and solution checks on an execution
146
155
  """
156
+
147
157
  ROLES_WITH_ACCESS = [PLANNER_ROLE, ADMIN_ROLE]
158
+
148
159
  def __init__(self):
149
160
  super().__init__()
150
161
  self.model = ExecutionModel
@@ -175,7 +186,8 @@ class DataCheckInstanceEndpoint(BaseMetaResource):
175
186
  err = "The instance to check does not exist"
176
187
  raise ObjectDoesNotExist(
177
188
  error=err,
178
- log_txt=f"Error while user {self.get_user()} tries to run data checks on instance {idx}. " + err
189
+ log_txt=f"Error while user {self.get_user()} tries to run data checks on instance {idx}. "
190
+ + err,
179
191
  )
180
192
  payload = dict(
181
193
  config=dict(checks_only=True),
@@ -204,8 +216,8 @@ class DataCheckInstanceEndpoint(BaseMetaResource):
204
216
  message=EXECUTION_STATE_MESSAGE_DICT[EXEC_STATE_ERROR_START],
205
217
  state=EXEC_STATE_ERROR_START,
206
218
  ),
207
- log_txt=f"Error while user {self.get_user()} tries to run data checks on instance {idx}. " + err
208
-
219
+ log_txt=f"Error while user {self.get_user()} tries to run data checks on instance {idx}. "
220
+ + err,
209
221
  )
210
222
  # ask airflow if dag_name exists
211
223
  schema_info = af_client.get_orch_info(schema)
@@ -221,8 +233,8 @@ class DataCheckInstanceEndpoint(BaseMetaResource):
221
233
  message=EXECUTION_STATE_MESSAGE_DICT[EXEC_STATE_ERROR_START],
222
234
  state=EXEC_STATE_ERROR_START,
223
235
  ),
224
- log_txt=f"Error while user {self.get_user()} tries to run data checks on instance {idx}. " + err
225
-
236
+ log_txt=f"Error while user {self.get_user()} tries to run data checks on instance {idx}. "
237
+ + err,
226
238
  )
227
239
 
228
240
  try:
@@ -239,12 +251,13 @@ class DataCheckInstanceEndpoint(BaseMetaResource):
239
251
  message=EXECUTION_STATE_MESSAGE_DICT[EXEC_STATE_ERROR],
240
252
  state=EXEC_STATE_ERROR,
241
253
  ),
242
- log_txt=f"Error while user {self.get_user()} tries to run data checks on instance {idx}. " + error
254
+ log_txt=f"Error while user {self.get_user()} tries to run data checks on instance {idx}. "
255
+ + error,
243
256
  )
244
257
 
245
258
  # if we succeed, we register the dag_run_id in the execution table:
246
259
  af_data = response.json()
247
- execution.dag_run_id = af_data["dag_run_id"]
260
+ execution.run_id = af_data["dag_run_id"]
248
261
  execution.update_state(EXEC_STATE_QUEUED)
249
262
  current_app.logger.info(
250
263
  "User {} creates instance check execution {}".format(
@@ -258,7 +271,9 @@ class DataCheckCaseEndpoint(BaseMetaResource):
258
271
  """
259
272
  Endpoint used to execute the instance and solution checks on an execution
260
273
  """
274
+
261
275
  ROLES_WITH_ACCESS = [PLANNER_ROLE, ADMIN_ROLE]
276
+
262
277
  def __init__(self):
263
278
  super().__init__()
264
279
  self.model = ExecutionModel
@@ -289,7 +304,8 @@ class DataCheckCaseEndpoint(BaseMetaResource):
289
304
  err = "The case to check does not exist"
290
305
  raise ObjectDoesNotExist(
291
306
  error=err,
292
- log_txt=f"Error while user {self.get_user()} tries to run data checks on case {idx}. " + err
307
+ log_txt=f"Error while user {self.get_user()} tries to run data checks on case {idx}. "
308
+ + err,
293
309
  )
294
310
 
295
311
  schema = case.schema or "solve_model_dag"
@@ -309,14 +325,18 @@ class DataCheckCaseEndpoint(BaseMetaResource):
309
325
  if schema == "pulp":
310
326
  validation_schema = "solve_model_dag"
311
327
 
312
- data_jsonschema = DeployedOrch.get_one_schema(config, validation_schema, INSTANCE_SCHEMA)
313
- validation_errors = json_schema_validate_as_string(data_jsonschema, instance_payload["data"])
328
+ data_jsonschema = DeployedOrch.get_one_schema(
329
+ config, validation_schema, INSTANCE_SCHEMA
330
+ )
331
+ validation_errors = json_schema_validate_as_string(
332
+ data_jsonschema, instance_payload["data"]
333
+ )
314
334
 
315
335
  if validation_errors:
316
336
  raise InvalidData(
317
337
  payload=dict(jsonschema_errors=validation_errors),
318
338
  log_txt=f"Error while user {self.get_user()} tries to run data checks on case {idx}. "
319
- f"Instance data does not match the jsonschema.",
339
+ f"Instance data does not match the jsonschema.",
320
340
  )
321
341
 
322
342
  instance, _ = self.post_list(data=instance_payload)
@@ -334,14 +354,18 @@ class DataCheckCaseEndpoint(BaseMetaResource):
334
354
 
335
355
  payload["data"] = case.solution
336
356
 
337
- data_jsonschema = DeployedOrch.get_one_schema(config, validation_schema, SOLUTION_SCHEMA)
338
- validation_errors = json_schema_validate_as_string(data_jsonschema, payload["data"])
357
+ data_jsonschema = DeployedOrch.get_one_schema(
358
+ config, validation_schema, SOLUTION_SCHEMA
359
+ )
360
+ validation_errors = json_schema_validate_as_string(
361
+ data_jsonschema, payload["data"]
362
+ )
339
363
 
340
364
  if validation_errors:
341
365
  raise InvalidData(
342
366
  payload=dict(jsonschema_errors=validation_errors),
343
367
  log_txt=f"Error while user {self.get_user()} tries to run data checks on case {idx}. "
344
- f"Solution data does not match the jsonschema.",
368
+ f"Solution data does not match the jsonschema.",
345
369
  )
346
370
 
347
371
  self.data_model = ExecutionModel
@@ -366,7 +390,8 @@ class DataCheckCaseEndpoint(BaseMetaResource):
366
390
  message=EXECUTION_STATE_MESSAGE_DICT[EXEC_STATE_ERROR_START],
367
391
  state=EXEC_STATE_ERROR_START,
368
392
  ),
369
- log_txt=f"Error while user {self.get_user()} tries to run data checks on case {idx}. " + err
393
+ log_txt=f"Error while user {self.get_user()} tries to run data checks on case {idx}. "
394
+ + err,
370
395
  )
371
396
  # ask airflow if dag_name exists
372
397
  schema_info = af_client.get_orch_info(schema)
@@ -382,7 +407,8 @@ class DataCheckCaseEndpoint(BaseMetaResource):
382
407
  message=EXECUTION_STATE_MESSAGE_DICT[EXEC_STATE_ERROR_START],
383
408
  state=EXEC_STATE_ERROR_START,
384
409
  ),
385
- log_txt=f"Error while user {self.get_user()} tries to run data checks on case {idx}. " + err
410
+ log_txt=f"Error while user {self.get_user()} tries to run data checks on case {idx}. "
411
+ + err,
386
412
  )
387
413
 
388
414
  try:
@@ -400,12 +426,13 @@ class DataCheckCaseEndpoint(BaseMetaResource):
400
426
  message=EXECUTION_STATE_MESSAGE_DICT[EXEC_STATE_ERROR],
401
427
  state=EXEC_STATE_ERROR,
402
428
  ),
403
- log_txt=f"Error while user {self.get_user()} tries to run data checks on case {idx}. " + error
429
+ log_txt=f"Error while user {self.get_user()} tries to run data checks on case {idx}. "
430
+ + error,
404
431
  )
405
432
 
406
433
  # if we succeed, we register the dag_run_id in the execution table:
407
434
  af_data = response.json()
408
- execution.dag_run_id = af_data["dag_run_id"]
435
+ execution.run_id = af_data["dag_run_id"]
409
436
  execution.update_state(EXEC_STATE_QUEUED)
410
437
  current_app.logger.info(
411
438
  "User {} creates case check execution {}".format(