cornflow 1.1.5__py3-none-any.whl → 2.0.0a6__py3-none-any.whl

This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
cornflow/cli/service.py CHANGED
@@ -16,7 +16,7 @@ from cornflow.commands import (
     update_schemas_command,
     update_dag_registry_command,
 )
-from cornflow.shared.const import AUTH_DB, ADMIN_ROLE, SERVICE_ROLE
+from cornflow.shared.const import AUTH_DB, ADMIN_ROLE, DATABRICKS_BACKEND, SERVICE_ROLE, AIRFLOW_BACKEND
 from cornflow.shared import db
 from cryptography.fernet import Fernet
 from flask_migrate import Migrate, upgrade
@@ -37,14 +37,32 @@ def init_cornflow_service():
     ###################################
     # Global defaults and back-compat #
     ###################################
+    # cornflow backend selection
+    cornflow_backend = os.getenv("CORNFLOW_BACKEND", AIRFLOW_BACKEND)
+    os.environ["CORNFLOW_BACKEND"] = cornflow_backend
+    cornflow_backend = int(cornflow_backend)
     # Airflow global default conn
-    airflow_user = os.getenv("AIRFLOW_USER", "admin")
-    airflow_pwd = os.getenv("AIRFLOW_PWD", "admin")
-    airflow_url = os.getenv("AIRFLOW_URL", "http://webserver:8080")
-    cornflow_url = os.environ.setdefault("cornflow_url", "http://cornflow:5000")
-    os.environ["AIRFLOW_USER"] = airflow_user
-    os.environ["AIRFLOW_PWD"] = airflow_pwd
-    os.environ["AIRFLOW_URL"] = airflow_url
+    if cornflow_backend == AIRFLOW_BACKEND:
+        airflow_user = os.getenv("AIRFLOW_USER", "admin")
+        airflow_pwd = os.getenv("AIRFLOW_PWD", "admin")
+        airflow_url = os.getenv("AIRFLOW_URL", "http://webserver:8080")
+        os.environ["AIRFLOW_USER"] = airflow_user
+        os.environ["AIRFLOW_PWD"] = airflow_pwd
+        os.environ["AIRFLOW_URL"] = airflow_url
+    elif cornflow_backend == DATABRICKS_BACKEND:
+        databricks_url = os.getenv("DATABRICKS_HOST")
+        databricks_auth_secret = os.getenv("DATABRICKS_CLIENT_SECRET")
+        databricks_token_endpoint = os.getenv("DATABRICKS_TOKEN_ENDPOINT")
+        databricks_ep_clusters = os.getenv("DATABRICKS_EP_CLUSTERS")
+        databricks_client_id = os.getenv("DATABRICKS_CLIENT_ID")
+        os.environ["DATABRICKS_HOST"] = databricks_url
+        os.environ["DATABRICKS_CLIENT_SECRET"] = databricks_auth_secret
+        os.environ["DATABRICKS_TOKEN_ENDPOINT"] = databricks_token_endpoint
+        os.environ["DATABRICKS_EP_CLUSTERS"] = databricks_ep_clusters
+        os.environ["DATABRICKS_CLIENT_ID"] = databricks_client_id
+    else:
+        raise Exception("Selected backend not among valid options")
+
     os.environ["FLASK_APP"] = "cornflow.app"
     os.environ["SECRET_KEY"] = os.getenv("FERNET_KEY", Fernet.generate_key().decode())
 
@@ -150,11 +168,17 @@ def init_cornflow_service():
         SERVICE_ROLE,
         verbose=True,
     )
-    register_deployed_dags_command(
-        airflow_url, airflow_user, airflow_pwd, verbose=True
-    )
-    register_dag_permissions_command(open_deployment, verbose=True)
-    update_schemas_command(airflow_url, airflow_user, airflow_pwd, verbose=True)
+
+    if cornflow_backend == AIRFLOW_BACKEND:
+        register_deployed_dags_command(
+            airflow_url, airflow_user, airflow_pwd, verbose=True
+        )
+        register_dag_permissions_command(open_deployment, verbose=True)
+        update_schemas_command(
+            airflow_url, airflow_user, airflow_pwd, verbose=True
+        )
+    else:
+        register_dag_permissions_command(open_deployment, verbose=True)
 
     # execute gunicorn application
     os.system(
@@ -207,14 +231,22 @@ def init_cornflow_service():
         SERVICE_ROLE,
         verbose=True,
     )
-    register_deployed_dags_command(
-        airflow_url, airflow_user, airflow_pwd, verbose=True
-    )
-    register_dag_permissions_command(open_deployment, verbose=True)
-    update_schemas_command(airflow_url, airflow_user, airflow_pwd, verbose=True)
-    update_dag_registry_command(
-        airflow_url, airflow_user, airflow_pwd, verbose=True
-    )
+
+    click.echo(f"Selected backend is: {cornflow_backend}")
+    if cornflow_backend == AIRFLOW_BACKEND:
+        register_deployed_dags_command(
+            airflow_url, airflow_user, airflow_pwd, verbose=True
+        )
+
+        register_dag_permissions_command(open_deployment, verbose=True)
+        update_schemas_command(
+            airflow_url, airflow_user, airflow_pwd, verbose=True
+        )
+        update_dag_registry_command(
+            airflow_url, airflow_user, airflow_pwd, verbose=True
+        )
+    else:
+        register_dag_permissions_command(open_deployment, verbose=True)
 
     os.system(
         f"/usr/local/bin/gunicorn -c python:cornflow.gunicorn "
cornflow/commands/dag.py CHANGED
@@ -10,7 +10,7 @@ def register_deployed_dags_command(
 
     # Internal modules imports
     from cornflow_client.airflow.api import Airflow
-    from cornflow.models import DeployedDAG
+    from cornflow.models import DeployedOrch
     from cornflow.shared import db
 
     af_client = Airflow(url, user, pwd)
@@ -27,7 +27,7 @@ def register_deployed_dags_command(
         current_app.logger.info("Airflow is not reachable")
         return False
 
-    dags_registered = [dag.id for dag in DeployedDAG.get_all_objects()]
+    dags_registered = [dag.id for dag in DeployedOrch.get_all_objects()]
 
     response = af_client.get_model_dags()
     dag_list = response.json()["dags"]
@@ -39,7 +39,7 @@ def register_deployed_dags_command(
         }
 
     processed_dags = [
-        DeployedDAG(
+        DeployedOrch(
             {
                 "id": dag["dag_id"],
                 "description": dag["description"],
@@ -75,7 +75,7 @@ def register_deployed_dags_command(
 
 
 def register_deployed_dags_command_test(dags: list = None, verbose: bool = False):
-    from cornflow.models import DeployedDAG
+    from cornflow.models import DeployedOrch
     from flask import current_app
     from cornflow_client import get_pulp_jsonschema, get_empty_schema
 
@@ -83,7 +83,7 @@ def register_deployed_dags_command_test(dags: list = None, verbose: bool = False
         dags = ["solve_model_dag", "gc", "timer"]
 
     deployed_dag = [
-        DeployedDAG(
+        DeployedOrch(
             {
                 "id": "solve_model_dag",
                 "description": None,
@@ -95,7 +95,7 @@ def register_deployed_dags_command_test(dags: list = None, verbose: bool = False
             }
         )
     ] + [
-        DeployedDAG(
+        DeployedOrch(
             {
                 "id": dag,
                 "description": None,
@@ -1,6 +1,6 @@
 import sys
 from importlib import import_module
-
+import click
 from cornflow.shared.const import (
     BASE_PERMISSION_ASSIGNATION,
     EXTRA_PERMISSION_ASSIGNATION,
@@ -124,11 +124,11 @@ def register_base_permissions_command(external_app: str = None, verbose: bool =
 def register_dag_permissions_command(
     open_deployment: int = None, verbose: bool = False
 ):
-
+    click.echo(f"Checkpoint 1")
     from flask import current_app
     from sqlalchemy.exc import DBAPIError, IntegrityError
 
-    from cornflow.models import DeployedDAG, PermissionsDAG, UserModel
+    from cornflow.models import DeployedOrch, PermissionsDAG, UserModel
     from cornflow.shared import db
 
     if open_deployment is None:
@@ -138,7 +138,7 @@ def register_dag_permissions_command(
         (permission.dag_id, permission.user_id)
         for permission in PermissionsDAG.get_all_objects()
     ]
-
+    click.echo(f"Checkpoint 2")
     try:
         db.session.commit()
     except DBAPIError as e:
@@ -146,15 +146,17 @@ def register_dag_permissions_command(
         current_app.logger.error(f"Unknown error on database commit: {e}")
 
     all_users = UserModel.get_all_users().all()
-    all_dags = DeployedDAG.get_all_objects().all()
+    all_dags = DeployedOrch.get_all_objects().all()
 
     if open_deployment == 1:
+        click.echo(f"Checkpoint 3")
         permissions = [
             PermissionsDAG({"dag_id": dag.id, "user_id": user.id})
             for user in all_users
             for dag in all_dags
             if (dag.id, user.id) not in existing_permissions
         ]
+        click.echo(f"Checkpoint 4")
 
     else:
         permissions = [
@@ -163,10 +165,10 @@ def register_dag_permissions_command(
             for dag in all_dags
             if (dag.id, user.id) not in existing_permissions and user.is_service_user()
         ]
-
+        click.echo(f"Checkpoint 5")
     if len(permissions) > 1:
         db.session.bulk_save_objects(permissions)
-
+    click.echo(f"Checkpoint 6")
     try:
         db.session.commit()
     except IntegrityError as e:
@@ -175,7 +177,7 @@ def register_dag_permissions_command(
     except DBAPIError as e:
         db.session.rollback()
         current_app.logger.error(f"Unknown error on dag permissions register: {e}")
-
+    click.echo(f"Checkpoint 7")
     if "postgres" in str(db.session.get_bind()):
         db.engine.execute(
             "SELECT setval(pg_get_serial_sequence('permission_dag', 'id'), MAX(id)) FROM permission_dag;"
@@ -188,11 +190,11 @@ def register_dag_permissions_command(
             current_app.logger.error(
                 f"Unknown error on dag permissions sequence updating: {e}"
             )
-
+            click.echo(f"Checkpoint 7")
     if verbose:
         if len(permissions) > 1:
             current_app.logger.info(f"DAG permissions registered: {permissions}")
         else:
             current_app.logger.info("No new DAG permissions")
-
+    click.echo(f"Checkpoint 8")
     pass
cornflow/config.py CHANGED
@@ -15,9 +15,7 @@ class DefaultConfig(object):
     SECRET_TOKEN_KEY = os.getenv("SECRET_KEY")
     SECRET_BI_KEY = os.getenv("SECRET_BI_KEY")
     SQLALCHEMY_DATABASE_URI = os.getenv("DATABASE_URL", "sqlite:///cornflow.db")
-    AIRFLOW_URL = os.getenv("AIRFLOW_URL")
-    AIRFLOW_USER = os.getenv("AIRFLOW_USER")
-    AIRFLOW_PWD = os.getenv("AIRFLOW_PWD")
+
     AUTH_TYPE = int(os.getenv("AUTH_TYPE", AUTH_DB))
     DEFAULT_ROLE = int(os.getenv("DEFAULT_ROLE", PLANNER_ROLE))
     CORS_ORIGINS = os.getenv("CORS_ORIGINS", "*")
@@ -28,6 +26,21 @@ class DefaultConfig(object):
     SIGNUP_ACTIVATED = int(os.getenv("SIGNUP_ACTIVATED", 1))
     CORNFLOW_SERVICE_USER = os.getenv("CORNFLOW_SERVICE_USER", "service_user")
 
+    # To change the tasks backend used by cornflow to solve the optimization models
+    CORNFLOW_BACKEND = int(os.getenv("CORNFLOW_BACKEND", AIRFLOW_BACKEND))
+
+    # AIRFLOW config
+    AIRFLOW_URL = os.getenv("AIRFLOW_URL")
+    AIRFLOW_USER = os.getenv("AIRFLOW_USER")
+    AIRFLOW_PWD = os.getenv("AIRFLOW_PWD")
+
+    # DATABRICKS config
+    DATABRICKS_URL = os.getenv("DATABRICKS_HOST")
+    DATABRICKS_AUTH_SECRET = os.getenv("DATABRICKS_CLIENT_SECRET")
+    DATABRICKS_TOKEN_ENDPOINT = os.getenv("DATABRICKS_TOKEN_ENDPOINT")
+    DATABRICKS_EP_CLUSTERS = os.getenv("DATABRICKS_EP_CLUSTERS")
+    DATABRICKS_CLIENT_ID = os.getenv("DATABRICKS_CLIENT_ID")
+
     # If service user is allow to log with username and password
     SERVICE_USER_ALLOW_PASSWORD_LOGIN = int(
         os.getenv("SERVICE_USER_ALLOW_PASSWORD_LOGIN", 1)
@@ -29,7 +29,7 @@ from .data_check import (
     DataCheckCaseEndpoint,
 )
 from .example_data import ExampleDataListEndpoint, ExampleDataDetailEndpoint
-from .execution import (
+from .execution_databricks import (
     ExecutionEndpoint,
     ExecutionDetailsEndpoint,
     ExecutionStatusEndpoint,
@@ -16,7 +16,7 @@ import jsonpatch
 
 # Import from internal modules
 from cornflow.endpoints.meta_resource import BaseMetaResource
-from cornflow.models import CaseModel, ExecutionModel, DeployedDAG, InstanceModel
+from cornflow.models import CaseModel, ExecutionModel, DeployedOrch, InstanceModel
 from cornflow.shared.authentication import Auth, authenticate
 from cornflow.shared.compress import compressed
 from cornflow.shared.const import VIEWER_ROLE, PLANNER_ROLE, ADMIN_ROLE
@@ -79,14 +79,14 @@ class CaseEndpoint(BaseMetaResource):
 
         # We validate the instance data if it exists
         if kwargs.get("data") is not None:
-            data_schema = DeployedDAG.get_one_schema(config, schema, INSTANCE_SCHEMA)
+            data_schema = DeployedOrch.get_one_schema(config, schema, INSTANCE_SCHEMA)
             data_errors = json_schema_validate_as_string(data_schema, kwargs["data"])
             if data_errors:
                 raise InvalidData(payload=dict(jsonschema_errors=data_errors))
 
         # And the solution data if it exists
         if kwargs.get("solution") is not None:
-            solution_schema = DeployedDAG.get_one_schema(config, schema, SOLUTION_SCHEMA)
+            solution_schema = DeployedOrch.get_one_schema(config, schema, SOLUTION_SCHEMA)
             solution_errors = json_schema_validate_as_string(solution_schema, kwargs["solution"])
             if solution_errors:
                 raise InvalidData(payload=dict(jsonschema_errors=solution_errors))
@@ -383,7 +383,7 @@ class CaseToInstance(BaseMetaResource):
         config = current_app.config
 
         # Data validation
-        jsonschema = DeployedDAG.get_one_schema(config, schema, INSTANCE_SCHEMA)
+        jsonschema = DeployedOrch.get_one_schema(config, schema, INSTANCE_SCHEMA)
         data_errors = json_schema_validate_as_string(jsonschema, payload["data"])
         if data_errors:
             raise InvalidData(
cornflow/endpoints/dag.py CHANGED
@@ -9,7 +9,7 @@ from flask_apispec import use_kwargs, doc, marshal_with
 
 # Import from internal modules
 from cornflow.endpoints.meta_resource import BaseMetaResource
-from cornflow.models import DeployedDAG, ExecutionModel, InstanceModel, CaseModel
+from cornflow.models import DeployedOrch, ExecutionModel, InstanceModel, CaseModel
 from cornflow.schemas import DeployedDAGSchema, DeployedDAGEditSchema
 from cornflow.schemas.case import CaseCheckRequest
 from cornflow.schemas.instance import InstanceCheckRequest
@@ -111,7 +111,7 @@ class DAGDetailEndpoint(BaseMetaResource):
         if solution_schema is not None:
             config = current_app.config
 
-            solution_schema = DeployedDAG.get_one_schema(config, solution_schema, SOLUTION_SCHEMA)
+            solution_schema = DeployedOrch.get_one_schema(config, solution_schema, SOLUTION_SCHEMA)
             solution_errors = json_schema_validate_as_string(solution_schema, data)
 
             if solution_errors:
@@ -215,7 +215,7 @@ class DAGEndpointManual(BaseMetaResource):
             solution_schema = "solve_model_dag"
         if solution_schema is not None:
             config = current_app.config
-            solution_schema = DeployedDAG.get_one_schema(config, solution_schema, SOLUTION_SCHEMA)
+            solution_schema = DeployedOrch.get_one_schema(config, solution_schema, SOLUTION_SCHEMA)
             solution_errors = json_schema_validate_as_string(solution_schema, data)
 
             if solution_errors:
@@ -244,7 +244,7 @@ class DeployedDAGEndpoint(BaseMetaResource):
 
     def __init__(self):
         super().__init__()
-        self.data_model = DeployedDAG
+        self.data_model = DeployedOrch
 
     @doc(
         description="Get list of deployed dags registered on the data base",
@@ -268,7 +268,7 @@ class DeployedDagDetailEndpoint(BaseMetaResource):
 
     def __init__(self):
         super().__init__()
-        self.data_model = DeployedDAG
+        self.data_model = DeployedOrch
 
     @doc(
         description="Endpoint to update the schemas of a deployed DAG",
@@ -10,7 +10,7 @@ from flask_apispec import marshal_with, doc
 
 # Import from internal modules
 from cornflow.endpoints.meta_resource import BaseMetaResource
-from cornflow.models import InstanceModel, ExecutionModel, CaseModel, DeployedDAG
+from cornflow.models import InstanceModel, ExecutionModel, CaseModel, DeployedOrch
 from cornflow.schemas.execution import ExecutionDetailsEndpointResponse
 from cornflow.shared.authentication import Auth, authenticate
 from cornflow.shared.const import (
@@ -95,7 +95,7 @@ class DataCheckExecutionEndpoint(BaseMetaResource):
                 log_txt=f"Error while user {self.get_user()} tries to run data checks on execution {idx}. " + err
             )
         # ask airflow if dag_name exists
-        schema_info = af_client.get_dag_info(schema)
+        schema_info = af_client.get_orch_info(schema)
 
         info = schema_info.json()
         if info["is_paused"]:
@@ -112,8 +112,8 @@ class DataCheckExecutionEndpoint(BaseMetaResource):
             )
 
         try:
-            response = af_client.run_dag(
-                execution.id, dag_name=schema, checks_only=True
+            response = af_client.run_workflow(
+                execution.id, orch_name=schema, checks_only=True
             )
         except AirflowError as err:
             error = "Airflow responded with an error: {}".format(err)
@@ -208,7 +208,7 @@ class DataCheckInstanceEndpoint(BaseMetaResource):
 
             )
         # ask airflow if dag_name exists
-        schema_info = af_client.get_dag_info(schema)
+        schema_info = af_client.get_orch_info(schema)
 
         info = schema_info.json()
         if info["is_paused"]:
@@ -226,8 +226,8 @@ class DataCheckInstanceEndpoint(BaseMetaResource):
             )
 
         try:
-            response = af_client.run_dag(
-                execution.id, dag_name=schema, checks_only=True
+            response = af_client.run_workflow(
+                execution.id, orch_name=schema, checks_only=True
             )
         except AirflowError as err:
             error = "Airflow responded with an error: {}".format(err)
@@ -309,7 +309,7 @@ class DataCheckCaseEndpoint(BaseMetaResource):
         if schema == "pulp":
             validation_schema = "solve_model_dag"
 
-        data_jsonschema = DeployedDAG.get_one_schema(config, validation_schema, INSTANCE_SCHEMA)
+        data_jsonschema = DeployedOrch.get_one_schema(config, validation_schema, INSTANCE_SCHEMA)
         validation_errors = json_schema_validate_as_string(data_jsonschema, instance_payload["data"])
 
         if validation_errors:
@@ -334,7 +334,7 @@ class DataCheckCaseEndpoint(BaseMetaResource):
 
         payload["data"] = case.solution
 
-        data_jsonschema = DeployedDAG.get_one_schema(config, validation_schema, SOLUTION_SCHEMA)
+        data_jsonschema = DeployedOrch.get_one_schema(config, validation_schema, SOLUTION_SCHEMA)
         validation_errors = json_schema_validate_as_string(data_jsonschema, payload["data"])
 
         if validation_errors:
@@ -369,7 +369,7 @@ class DataCheckCaseEndpoint(BaseMetaResource):
                 log_txt=f"Error while user {self.get_user()} tries to run data checks on case {idx}. " + err
             )
         # ask airflow if dag_name exists
-        schema_info = af_client.get_dag_info(schema)
+        schema_info = af_client.get_orch_info(schema)
 
         info = schema_info.json()
         if info["is_paused"]:
@@ -386,8 +386,8 @@ class DataCheckCaseEndpoint(BaseMetaResource):
             )
 
         try:
-            response = af_client.run_dag(
-                execution.id, dag_name=schema, checks_only=True, case_id=idx
+            response = af_client.run_workflow(
+                execution.id, orch_name=schema, checks_only=True, case_id=idx
             )
 
         except AirflowError as err:
@@ -47,7 +47,7 @@ class ExampleDataListEndpoint(BaseMetaResource):
             raise AirflowError(error="Airflow is not accessible")
 
         # try airflow and see if dag_name exists
-        af_client.get_dag_info(dag_name)
+        af_client.get_orch_info(dag_name)
 
         current_app.logger.info("User gets example data from {}".format(dag_name))
 
@@ -90,7 +90,7 @@ class ExampleDataDetailEndpoint(BaseMetaResource):
             raise AirflowError(error="Airflow is not accessible")
 
         # try airflow and see if dag_name exists
-        af_client.get_dag_info(dag_name)
+        af_client.get_orch_info(dag_name)
 
         current_app.logger.info("User gets example data from {}".format(dag_name))
 
@@ -12,7 +12,7 @@ from flask_apispec import marshal_with, use_kwargs, doc
 
 # Import from internal modules
 from cornflow.endpoints.meta_resource import BaseMetaResource
-from cornflow.models import InstanceModel, DeployedDAG, ExecutionModel
+from cornflow.models import InstanceModel, DeployedOrch, ExecutionModel
 from cornflow.schemas.execution import (
     ExecutionDetailsEndpointResponse,
     ExecutionDetailsEndpointWithIndicatorsResponse,
@@ -106,7 +106,7 @@ class ExecutionEndpoint(BaseMetaResource):
                 continue
 
             try:
-                response = af_client.get_dag_run_status(
+                response = af_client.get_run_status(
                     dag_name=execution.schema, dag_run_id=dag_run_id
                 )
             except AirflowError as err:
@@ -175,10 +175,10 @@ class ExecutionEndpoint(BaseMetaResource):
         )
         # ask airflow if dag_name exists
         schema = execution.schema
-        schema_info = af_client.get_dag_info(schema)
+        schema_info = af_client.get_orch_info(schema)
 
         # Validate config before running the dag
-        config_schema = DeployedDAG.get_one_schema(config, schema, CONFIG_SCHEMA)
+        config_schema = DeployedOrch.get_one_schema(config, schema, CONFIG_SCHEMA)
         new_config, config_errors = json_schema_extend_and_validate_as_string(
             config_schema, kwargs["config"]
         )
@@ -198,7 +198,7 @@ class ExecutionEndpoint(BaseMetaResource):
         execution.update_config(new_config)
 
         # Validate instance data before running the dag
-        instance_schema = DeployedDAG.get_one_schema(config, schema, INSTANCE_SCHEMA)
+        instance_schema = DeployedOrch.get_one_schema(config, schema, INSTANCE_SCHEMA)
         instance_errors = json_schema_validate_as_string(instance_schema, instance.data)
         if instance_errors:
             execution.update_state(
@@ -214,7 +214,7 @@ class ExecutionEndpoint(BaseMetaResource):
             )
         # Validate solution data before running the dag (if it exists)
         if kwargs.get("data") is not None:
-            solution_schema = DeployedDAG.get_one_schema(
+            solution_schema = DeployedOrch.get_one_schema(
                 config, schema, SOLUTION_SCHEMA
             )
             solution_errors = json_schema_validate_as_string(
@@ -245,7 +245,7 @@ class ExecutionEndpoint(BaseMetaResource):
             )
 
         try:
-            response = af_client.run_dag(execution.id, dag_name=schema)
+            response = af_client.run_workflow(execution.id, orch_name=schema)
         except AirflowError as err:
             error = "Airflow responded with an error: {}".format(err)
             current_app.logger.error(error)
@@ -325,7 +325,7 @@ class ExecutionRelaunchEndpoint(BaseMetaResource):
             }, 201
 
         # Validate config before running the dag
-        config_schema = DeployedDAG.get_one_schema(
+        config_schema = DeployedOrch.get_one_schema(
             config, kwargs["schema"], CONFIG_SCHEMA
         )
         config_errors = json_schema_validate_as_string(config_schema, kwargs["config"])
@@ -353,7 +353,7 @@ class ExecutionRelaunchEndpoint(BaseMetaResource):
         )
         # ask airflow if dag_name exists
         schema = execution.schema
-        schema_info = af_client.get_dag_info(schema)
+        schema_info = af_client.get_orch_info(schema)
 
         info = schema_info.json()
         if info["is_paused"]:
@@ -371,7 +371,7 @@ class ExecutionRelaunchEndpoint(BaseMetaResource):
             )
 
         try:
-            response = af_client.run_dag(execution.id, dag_name=schema)
+            response = af_client.run_workflow(execution.id, orch_name=schema)
         except AirflowError as err:
             error = "Airflow responded with an error: {}".format(err)
             current_app.logger.error(error)
@@ -445,7 +445,7 @@ class ExecutionDetailsEndpoint(ExecutionDetailsEndpointBase):
         schema = ExecutionModel.get_one_object(user=self.get_user(), idx=idx).schema
 
         if data.get("data") is not None and schema is not None:
-            data_jsonschema = DeployedDAG.get_one_schema(
+            data_jsonschema = DeployedOrch.get_one_schema(
                 config, schema, SOLUTION_SCHEMA
             )
             validation_errors = json_schema_validate_as_string(
@@ -573,7 +573,7 @@ class ExecutionStatusEndpoint(BaseMetaResource):
 
         try:
             # TODO: get the dag_name from somewhere!
-            response = af_client.get_dag_run_status(
+            response = af_client.get_run_status(
                 dag_name=execution.schema, dag_run_id=dag_run_id
             )
         except AirflowError as err: