cornflow 2.0.0a9__py3-none-any.whl → 2.0.0a11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
cornflow/cli/service.py CHANGED
@@ -16,7 +16,13 @@ from cornflow.commands import (
     update_schemas_command,
     update_dag_registry_command,
 )
-from cornflow.shared.const import AUTH_DB, ADMIN_ROLE, DATABRICKS_BACKEND, SERVICE_ROLE, AIRFLOW_BACKEND
+from cornflow.shared.const import (
+    AUTH_DB,
+    ADMIN_ROLE,
+    DATABRICKS_BACKEND,
+    SERVICE_ROLE,
+    AIRFLOW_BACKEND,
+)
 from cornflow.shared import db
 from cryptography.fernet import Fernet
 from flask_migrate import Migrate, upgrade
@@ -106,7 +112,7 @@ def init_cornflow_service():

     # Check LDAP parameters for active directory and show message
     if os.getenv("AUTH_TYPE") == 2:
-        print(
+        click.echo(
             "WARNING: Cornflow will be deployed with LDAP Authorization. Please review your ldap auth configuration."
         )

@@ -129,10 +135,10 @@ def init_cornflow_service():
             f"cat > /etc/logrotate.d/cornflow <<EOF\n {conf} \nEOF", shell=True
         )
         out_logrotate = logrotate.stdout
-        print(out_logrotate)
+        click.echo(out_logrotate)

     except error:
-        print(error)
+        click.echo(error)

     external_application = int(os.getenv("EXTERNAL_APP", 0))
     if external_application == 0:
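
Throughout the CLI this release replaces bare print calls with click.echo, the idiomatic output primitive inside click commands, which copes better with redirected or non-terminal streams. A minimal sketch of the pattern; the command name here is illustrative, not part of cornflow:

    import click

    @click.command()
    def warn_ldap():
        # click.echo is preferred over print inside click commands
        click.echo("WARNING: Cornflow will be deployed with LDAP Authorization.")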
cornflow/commands/permissions.py CHANGED
@@ -14,13 +14,22 @@ from sqlalchemy.exc import DBAPIError, IntegrityError
 def register_base_permissions_command(external_app: str = None, verbose: bool = False):
     if external_app is None:
         from cornflow.endpoints import resources, alarms_resources
+
         resources_to_register = resources
+        extra_permissions = EXTRA_PERMISSION_ASSIGNATION
         if current_app.config["ALARMS_ENDPOINTS"]:
             resources_to_register = resources + alarms_resources
     elif external_app is not None:
         sys.path.append("./")
         external_module = import_module(external_app)
         resources_to_register = external_module.endpoints.resources
+        try:
+            extra_permissions = (
+                EXTRA_PERMISSION_ASSIGNATION
+                + external_module.shared.const.EXTRA_PERMISSION_ASSIGNATION
+            )
+        except AttributeError:
+            extra_permissions = EXTRA_PERMISSION_ASSIGNATION
     else:
         resources_to_register = []
         exit()
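
The functional change here: when cornflow runs with an external app, that app's own EXTRA_PERMISSION_ASSIGNATION (if it defines one under shared.const) is now appended to cornflow's base list, falling back silently when the attribute is absent. A minimal sketch of the pattern, with a hypothetical external app package called my_app:

    from importlib import import_module

    EXTRA_PERMISSION_ASSIGNATION = []  # cornflow's base assignations (list of tuples)

    external_module = import_module("my_app")  # hypothetical external app package
    try:
        # Concatenate the external app's extra permission tuples onto the base list.
        extra_permissions = (
            EXTRA_PERMISSION_ASSIGNATION
            + external_module.shared.const.EXTRA_PERMISSION_ASSIGNATION
        )
    except AttributeError:
        # The external app does not define the constant: keep the base list only.
        extra_permissions = EXTRA_PERMISSION_ASSIGNATION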
@@ -52,7 +61,7 @@ def register_base_permissions_command(external_app: str = None, verbose: bool =
                 "api_view_id": views_in_db[endpoint],
             }
         )
-        for role, action, endpoint in EXTRA_PERMISSION_ASSIGNATION
+        for role, action, endpoint in extra_permissions
     ]

     permissions_in_app_keys = [
@@ -124,7 +133,7 @@ def register_base_permissions_command(external_app: str = None, verbose: bool =
 def register_dag_permissions_command(
     open_deployment: int = None, verbose: bool = False
 ):
-    click.echo(f"Checkpoint 1")
+
     from flask import current_app
     from sqlalchemy.exc import DBAPIError, IntegrityError

@@ -138,7 +147,7 @@ def register_dag_permissions_command(
         (permission.dag_id, permission.user_id)
         for permission in PermissionsDAG.get_all_objects()
     ]
-    click.echo(f"Checkpoint 2")
+
     try:
         db.session.commit()
     except DBAPIError as e:
@@ -149,14 +158,13 @@ def register_dag_permissions_command(
     all_dags = DeployedOrch.get_all_objects().all()

     if open_deployment == 1:
-        click.echo(f"Checkpoint 3")
+
         permissions = [
             PermissionsDAG({"dag_id": dag.id, "user_id": user.id})
             for user in all_users
             for dag in all_dags
             if (dag.id, user.id) not in existing_permissions
         ]
-        click.echo(f"Checkpoint 4")

     else:
         permissions = [
@@ -165,10 +173,10 @@ def register_dag_permissions_command(
             for dag in all_dags
             if (dag.id, user.id) not in existing_permissions and user.is_service_user()
         ]
-        click.echo(f"Checkpoint 5")
+
     if len(permissions) > 1:
         db.session.bulk_save_objects(permissions)
-        click.echo(f"Checkpoint 6")
+
     try:
         db.session.commit()
     except IntegrityError as e:
@@ -177,7 +185,7 @@ def register_dag_permissions_command(
     except DBAPIError as e:
         db.session.rollback()
         current_app.logger.error(f"Unknown error on dag permissions register: {e}")
-    click.echo(f"Checkpoint 7")
+
     if "postgres" in str(db.session.get_bind()):
         db.engine.execute(
             "SELECT setval(pg_get_serial_sequence('permission_dag', 'id'), MAX(id)) FROM permission_dag;"
@@ -190,11 +198,12 @@ def register_dag_permissions_command(
         current_app.logger.error(
             f"Unknown error on dag permissions sequence updating: {e}"
         )
-    click.echo(f"Checkpoint 7")
+
     if verbose:
+        click.echo(f"DAG permissions registered")
         if len(permissions) > 1:
             current_app.logger.info(f"DAG permissions registered: {permissions}")
         else:
             current_app.logger.info("No new DAG permissions")
-    click.echo(f"Checkpoint 8")
+
     pass
cornflow/config.py CHANGED
@@ -41,7 +41,7 @@ class DefaultConfig(object):
     DATABRICKS_EP_CLUSTERS = os.getenv("DATABRICKS_EP_CLUSTERS")
     DATABRICKS_CLIENT_ID = os.getenv("DATABRICKS_CLIENT_ID")

-    # If service user is allow to log with username and password
+    # If service user is allowed to log with username and password
     SERVICE_USER_ALLOW_PASSWORD_LOGIN = int(
         os.getenv("SERVICE_USER_ALLOW_PASSWORD_LOGIN", 1)
     )
cornflow/endpoints/execution_databricks.py CHANGED
@@ -5,28 +5,17 @@ These endpoints hve different access url, but manage the same data entities
 """

 # Import from libraries
-import datetime
-import logging
-import time
-from databricks.sdk import WorkspaceClient
-import databricks.sdk.service.jobs as j
-from cornflow.shared.const import (
-    AIRFLOW_BACKEND,
-    DATABRICKS_BACKEND,
-    STATUS_HEALTHY,
-    STATUS_UNHEALTHY,
-)
-# TODO AGA: Modificar import para sacarlo de cornflow_client
-from cornflow.shared.databricks import Databricks
-from cornflow_client.constants import INSTANCE_SCHEMA, CONFIG_SCHEMA, SOLUTION_SCHEMA
 from cornflow_client.airflow.api import Airflow
-# TODO AGA: Porqué el import no funcina correctamente
+from cornflow_client.constants import INSTANCE_SCHEMA, CONFIG_SCHEMA, SOLUTION_SCHEMA
+
+# TODO AGA: Porqué el import no funcina correctamente
 from flask import request, current_app
 from flask_apispec import marshal_with, use_kwargs, doc

 # Import from internal modules
 from cornflow.endpoints.meta_resource import BaseMetaResource
 from cornflow.models import InstanceModel, DeployedOrch, ExecutionModel
+from cornflow.orchestrator_constants import config_orchestrator
 from cornflow.schemas.execution import (
     ExecutionDetailsEndpointResponse,
     ExecutionDetailsEndpointWithIndicatorsResponse,
@@ -38,10 +27,14 @@ from cornflow.schemas.execution import (
     ExecutionEditRequest,
     QueryFiltersExecution,
     ReLaunchExecutionRequest,
-    ExecutionDetailsWithIndicatorsAndLogResponse
+    ExecutionDetailsWithIndicatorsAndLogResponse,
 )
 from cornflow.shared.authentication import Auth, authenticate
 from cornflow.shared.compress import compressed
+from cornflow.shared.const import (
+    AIRFLOW_BACKEND,
+    DATABRICKS_BACKEND,
+)
 from cornflow.shared.const import (
     EXEC_STATE_RUNNING,
     EXEC_STATE_ERROR,
@@ -51,16 +44,23 @@ from cornflow.shared.const import (
     EXECUTION_STATE_MESSAGE_DICT,
     AIRFLOW_TO_STATE_MAP,
     DATABRICKS_TO_STATE_MAP,
-    DATABRICKS_FINISH_TO_STATE_MAP,
     EXEC_STATE_STOPPED,
     EXEC_STATE_QUEUED,
 )
-from cornflow.shared.exceptions import AirflowError, DatabricksError, ObjectDoesNotExist, InvalidData
+
+# TODO AGA: Modificar import para sacarlo de cornflow_client
+from cornflow.shared.databricks import Databricks
+from cornflow.shared.exceptions import (
+    AirflowError,
+    DatabricksError,
+    ObjectDoesNotExist,
+    InvalidData,
+    EndpointNotImplemented,
+)
 from cornflow.shared.validators import (
     json_schema_validate_as_string,
     json_schema_extend_and_validate_as_string,
 )
-from cornflow.orchestrator_constants import config_orchestrator


 class ExecutionEndpoint(BaseMetaResource):
@@ -158,13 +158,13 @@ class ExecutionEndpoint(BaseMetaResource):
         # TODO: should the schema field be cross validated with the instance schema field?

         ORQ_TYPE = current_app.config["CORNFLOW_BACKEND"]
-        if ORQ_TYPE==AIRFLOW_BACKEND:
-            orq_const= config_orchestrator["airflow"]
-            ORQ_ERROR=AirflowError
-        elif ORQ_TYPE==DATABRICKS_BACKEND:
-            orq_const= config_orchestrator["databricks"]
+        if ORQ_TYPE == AIRFLOW_BACKEND:
+            orq_const = config_orchestrator["airflow"]
+            ORQ_ERROR = AirflowError
+        elif ORQ_TYPE == DATABRICKS_BACKEND:
+            orq_const = config_orchestrator["databricks"]
             # TODO AGA: Revisar si esto funcionaría correctamente
-            ORQ_ERROR=DatabricksError
+            ORQ_ERROR = DatabricksError

         if "schema" not in kwargs:
             kwargs["schema"] = orq_const["def_schema"]
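
This backend-selection preamble now appears, reformatted, in ExecutionEndpoint, ExecutionRelaunchEndpoint and ExecutionStatusEndpoint. A standalone sketch of its shape; the constant values and config_orchestrator contents below are placeholders, inferred from how orq_const["name"] and orq_const["def_schema"] are used elsewhere in this diff:

    AIRFLOW_BACKEND = 1      # placeholder values; the real ones live in
    DATABRICKS_BACKEND = 2   # cornflow/shared/const.py

    config_orchestrator = {
        "airflow": {"name": "Airflow", "def_schema": "solve_model_dag"},
        "databricks": {"name": "Databricks", "def_schema": "default_job_id"},
    }

    class AirflowError(Exception): ...
    class DatabricksError(Exception): ...

    def select_backend(orq_type):
        """Mirror of the endpoint preamble: pick per-backend constants and error type."""
        if orq_type == AIRFLOW_BACKEND:
            return config_orchestrator["airflow"], AirflowError
        elif orq_type == DATABRICKS_BACKEND:
            return config_orchestrator["databricks"], DatabricksError
        raise NotImplementedError(f"unknown backend: {orq_type}")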
@@ -188,13 +188,15 @@ class ExecutionEndpoint(BaseMetaResource):
         # We try to create an orch client
         # Note schema is a string with the name of the job/dag
         schema = execution.schema
-        # If we are dealing with DataBricks, the schema will 
+        # If we are dealing with DataBricks, the schema will
         # be the job id
-        orch_client, schema_info, execution= get_orch_client(schema,ORQ_TYPE,execution)
+        orch_client, schema_info, execution = get_orch_client(
+            schema, ORQ_TYPE, execution
+        )
         # endregion

         # region VALIDACIONES
-        # We check if the job/dag exists 
+        # We check if the job/dag exists
         orch_client.get_orch_info(schema)
         # Validate config before running the dag
         config_schema = DeployedOrch.get_one_schema(config, schema, CONFIG_SCHEMA)
@@ -248,8 +250,8 @@ class ExecutionEndpoint(BaseMetaResource):
             execution.update_log_txt(f"{solution_errors}")
             raise InvalidData(payload=dict(jsonschema_errors=solution_errors))
         # endregion
-
-        if ORQ_TYPE==AIRFLOW_BACKEND:
+
+        if ORQ_TYPE == AIRFLOW_BACKEND:
             info = schema_info.json()
             if info["is_paused"]:
                 err = "The dag exists but it is paused in airflow"
@@ -267,7 +269,7 @@ class ExecutionEndpoint(BaseMetaResource):
         # TODO AGA: revisar si hay que hacer alguna verificación a los JOBS

         try:
-            # TODO AGA: Hay que genestionar la posible eliminación de execution.id como 
+            # TODO AGA: Hay que genestionar la posible eliminación de execution.id como
             # parámetro, ya que no se puede seleccionar el id en databricks
             # revisar las consecuencias que puede tener
             response = orch_client.run_workflow(execution.id, orch_name=schema)
@@ -318,13 +320,13 @@ class ExecutionRelaunchEndpoint(BaseMetaResource):
         :rtype: Tuple(dict, integer)
         """
         ORQ_TYPE = current_app.config["CORNFLOW_BACKEND"]
-        if ORQ_TYPE==AIRFLOW_BACKEND:
-            orq_const= config_orchestrator["airflow"]
-            ORQ_ERROR=AirflowError
-        elif ORQ_TYPE==DATABRICKS_BACKEND:
-            orq_const= config_orchestrator["databricks"]
+        if ORQ_TYPE == AIRFLOW_BACKEND:
+            orq_const = config_orchestrator["airflow"]
+            ORQ_ERROR = AirflowError
+        elif ORQ_TYPE == DATABRICKS_BACKEND:
+            orq_const = config_orchestrator["databricks"]
             # TODO AGA: Revisar si esto funcionaría correctamente
-            ORQ_ERROR=DatabricksError
+            ORQ_ERROR = DatabricksError

         config = current_app.config
         if "schema" not in kwargs:
@@ -369,10 +371,12 @@ class ExecutionRelaunchEndpoint(BaseMetaResource):
                 log_txt=f"Error while user {self.get_user()} tries to relaunch execution {idx}. "
                 f"Configuration data does not match the jsonschema.",
             )
-        orch_client, schema_info, execution = get_orch_client(schema,ORQ_TYPE,execution)
-
+        orch_client, schema_info, execution = get_orch_client(
+            kwargs["schema"], ORQ_TYPE, execution
+        )
+
         if not orch_client.is_alive():
-            err = orq_const["name"]+" is not accessible"
+            err = orq_const["name"] + " is not accessible"
             current_app.logger.error(err)
             execution.update_state(EXEC_STATE_ERROR_START)
             raise ORQ_ERROR(
@@ -384,13 +388,13 @@ class ExecutionRelaunchEndpoint(BaseMetaResource):
                 log_txt=f"Error while user {self.get_user()} tries to relaunch execution {idx}. "
                 + err,
             )
-
+
         # ask airflow if dag_name exists
         schema = execution.schema
         schema_info = orch_client.get_orch_info(schema)

         info = schema_info.json()
-        if ORQ_TYPE==AIRFLOW_BACKEND:
+        if ORQ_TYPE == AIRFLOW_BACKEND:
             if info["is_paused"]:
                 err = "The dag exists but it is paused in airflow"
                 current_app.logger.error(err)
@@ -408,7 +412,7 @@ class ExecutionRelaunchEndpoint(BaseMetaResource):
         try:
             response = orch_client.run_workflow(execution.id, orch_name=schema)
         except ORQ_ERROR as err:
-            error = orq_const["name"]+" responded with an error: {}".format(err)
+            error = orq_const["name"] + " responded with an error: {}".format(err)
             current_app.logger.error(error)
             execution.update_state(EXEC_STATE_ERROR)
             raise ORQ_ERROR(
@@ -436,6 +440,7 @@ class ExecutionDetailsEndpointBase(BaseMetaResource):
     """
     Endpoint used to get the information of a certain execution. But not the data!
     """
+
     # TODO AGA DUDA: Se usa? Qué debería devolver?
     def __init__(self):
         super().__init__()
@@ -564,13 +569,13 @@ class ExecutionStatusEndpoint(BaseMetaResource):
         :rtype: Tuple(dict, integer)
         """
         ORQ_TYPE = current_app.config["CORNFLOW_BACKEND"]
-        if ORQ_TYPE==AIRFLOW_BACKEND:
-            orq_const= config_orchestrator["airflow"]
-            ORQ_ERROR=AirflowError
-        elif ORQ_TYPE==DATABRICKS_BACKEND:
-            orq_const= config_orchestrator["databricks"]
+        if ORQ_TYPE == AIRFLOW_BACKEND:
+            orq_const = config_orchestrator["airflow"]
+            ORQ_ERROR = AirflowError
+        elif ORQ_TYPE == DATABRICKS_BACKEND:
+            orq_const = config_orchestrator["databricks"]
             # TODO AGA: Revisar si esto funcionaría correctamente
-            ORQ_ERROR=DatabricksError
+            ORQ_ERROR = DatabricksError
         execution = self.data_model.get_one_object(user=self.get_user(), idx=idx)
         if execution is None:
             raise ObjectDoesNotExist(
@@ -593,6 +598,7 @@ class ExecutionStatusEndpoint(BaseMetaResource):
             raise ORQ_ERROR(
                 error=error, payload=dict(message=message, state=state), log_txt=log_txt
             )
+
         print("The execution is ", execution)
         print("The execution user is ", self.get_user())
         print("The execution id is ", idx)
@@ -610,10 +616,12 @@ class ExecutionStatusEndpoint(BaseMetaResource):
             )
         schema = execution.schema
         # TODO AGA: Revisar si merece la pena hacer una funcion que solo
-        orch_client, schema_info, execution= get_orch_client(schema ,ORQ_TYPE,execution)
+        orch_client, schema_info, execution = get_orch_client(
+            schema, ORQ_TYPE, execution
+        )

         if not orch_client.is_alive():
-            err = orq_const["name"] +" is not accessible"
+            err = orq_const["name"] + " is not accessible"
             _raise_af_error(
                 execution,
                 err,
@@ -623,11 +631,9 @@ class ExecutionStatusEndpoint(BaseMetaResource):

         try:
             # TODO: get the dag_name from somewhere!
-            state = orch_client.get_run_status(
-                dag_run_id
-            )
+            state = orch_client.get_run_status(schema, dag_run_id)
         except ORQ_ERROR as err:
-            error = orq_const["name"] +f" responded with an error: {err}"
+            error = orq_const["name"] + f" responded with an error: {err}"
             _raise_af_error(
                 execution,
                 error,
@@ -722,38 +728,9 @@ class ExecutionLogEndpoint(ExecutionDetailsEndpointBase):
         current_app.logger.info(f"User {self.get_user()} gets log of execution {idx}")
         return self.get_detail(user=self.get_user(), idx=idx)

+
 # region aux_functions
-def submit_one_job(cid):
-    # trigger one-time-run job and get waiter object
-    waiter = w.jobs.submit(run_name=f'cornflow-job-{time.time()}', tasks=[
-        j.SubmitTask(
-            task_key='nippon_production_scheduling',
-            existing_cluster_id=cid,
-            libraries=[],
-            spark_python_task=j.SparkPythonTask(
-                python_file='/Workspace/Repos/nippon/nippon_production_scheduling/main.py',
-            ),
-            timeout_seconds=0,
-        )
-    ])
-    logging.info(f'starting to poll: {waiter.run_id}')
-    # callback, that receives a polled entity between state updates
-    # If you want to perform polling in a separate thread, process, or service,
-    # you can use w.jobs.wait_get_run_job_terminated_or_skipped(
-    #   run_id=waiter.run_id,
-    #   timeout=datetime.timedelta(minutes=15),
-    #   callback=print_status) to achieve the same results.
-    #
-    # Waiter interface allows for `w.jobs.submit(..).result()` simplicity in
-    # the scenarios, where you need to block the calling thread for the job to finish.
-    run = waiter.result(timeout=datetime.timedelta(minutes=15),
-                        callback=print_status)
-    logging.info(f'job finished: {run.run_page_url}')
-    return waiter.run_id
-
-def print_status(run: j.Run):
-    statuses = [f'{t.task_key}: {t.state.life_cycle_state}' for t in run.tasks]
-    logging.info(f'workflow intermediate status: {", ".join(statuses)}')
+

 def get_orch_client(schema, orq_type, execution):
     """
@@ -762,10 +739,10 @@ def get_orch_client(schema, orq_type, execution):
     if orq_type == AIRFLOW_BACKEND:
         return get_airflow(schema, execution=execution)
     elif orq_type == DATABRICKS_BACKEND:
-        return get_databricks(schema,execution=execution)
+        return get_databricks(schema, execution=execution)
     else:
         raise EndpointNotImplemented()
-
+

 def get_airflow(schema, execution):
     """
@@ -783,12 +760,12 @@ def get_airflow(schema, execution):
                 message=EXECUTION_STATE_MESSAGE_DICT[EXEC_STATE_ERROR_START],
                 state=EXEC_STATE_ERROR_START,
             ),
-            log_txt=f"Error while user {self.get_user()} tries to create an execution "
+            log_txt=f"Error while user {execution.user_id} tries to create an execution "
             + err,
         )
-    # TODO AGA: revisar si tiene sentido que se devuelva execution o si 
+    # TODO AGA: revisar si tiene sentido que se devuelva execution o si
     # es un puntero
-    return af_client,schema_info, execution
+    return af_client, schema_info, execution


 def get_databricks(schema, execution):
@@ -807,21 +784,23 @@ def get_databricks(schema, execution):
                 message=EXECUTION_STATE_MESSAGE_DICT[EXEC_STATE_ERROR_START],
                 state=EXEC_STATE_ERROR_START,
             ),
-            log_txt=f"Error while user {self.get_user()} tries to create an execution "
+            log_txt=f"Error while user {execution.user_id} tries to create an execution "
             + err,
         )
     return db_client, schema_info, execution
 # endregion

-def map_run_state(state,ORQ_TYPE):
+
+def map_run_state(state, ORQ_TYPE):
     """
     Maps the state of the execution in the orchestrator to the state of the execution in cornflow
     """
-    if ORQ_TYPE==AIRFLOW_BACKEND:
+    if ORQ_TYPE == AIRFLOW_BACKEND:
+        state = state.json()["state"]
         return AIRFLOW_TO_STATE_MAP.get(state, EXEC_STATE_UNKNOWN)
-    elif ORQ_TYPE==DATABRICKS_BACKEND:
+    elif ORQ_TYPE == DATABRICKS_BACKEND:
         print("The state is ", state)
-        preliminar_state = DATABRICKS_TO_STATE_MAP.get(state,EXEC_STATE_UNKNOWN)
+        preliminar_state = DATABRICKS_TO_STATE_MAP.get(state, EXEC_STATE_UNKNOWN)
         # print("The preliminar state is ", preliminar_state)
         # if preliminar_state =="TERMINATED":
         #     # TODO AGA DUDA: Revisar si es correcto el error predeterminado
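
Note the behavioral change in map_run_state: for Airflow, the raw HTTP response is now unwrapped with state.json()["state"] before the lookup, while Databricks already passes a plain string. A sketch under the assumption that the two *_TO_STATE_MAP constants are plain dicts keyed by orchestrator state name:

    EXEC_STATE_UNKNOWN = -1  # placeholder value for this sketch

    def map_run_state(state, orq_type):
        if orq_type == AIRFLOW_BACKEND:
            # `state` is the response object returned by the Airflow client.
            state = state.json()["state"]
            return AIRFLOW_TO_STATE_MAP.get(state, EXEC_STATE_UNKNOWN)
        elif orq_type == DATABRICKS_BACKEND:
            # `state` is already a plain string for Databricks.
            return DATABRICKS_TO_STATE_MAP.get(state, EXEC_STATE_UNKNOWN)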
cornflow/models/execution.py CHANGED
@@ -117,7 +117,6 @@ class ExecutionModel(BaseDataModel):
         :param str message: Message for the error
         :return: nothing
         """
-        print("Updating state to ", code)
         self.state = code
         if message is None:
             self.state_message = EXECUTION_STATE_MESSAGE_DICT[code]
cornflow/shared/const.py CHANGED
@@ -1,5 +1,5 @@
 """
-In this files we import the values for different constants on cornflow server
+In this file we import the values for different constants on cornflow server
 """

 # CORNFLOW BACKEND
@@ -62,7 +62,7 @@ DATABRICKS_TO_STATE_MAP = dict(
 DATABRICKS_FINISH_TO_STATE_MAP = dict(
     SUCCESS=EXEC_STATE_CORRECT,
     USER_CANCELED=EXEC_STATE_STOPPED,
-    )
+)

 DATABRICKS_TERMINATE_STATE = "TERMINATED"
 # These codes and names are inherited from flask app builder in order to have the same names and values
cornflow/shared/databricks.py CHANGED
@@ -1,24 +1,31 @@
 """
 Python class to implement the Databricks client wrapper
 """
+
 import requests
 from databricks.sdk import WorkspaceClient
 from flask import current_app
 from cornflow.orchestrator_constants import config_orchestrator
+
 # TODO AGA: CODIGO REPETIDO
 # TODO AGA: revisar si el import está bien
 from cornflow_client.constants import DatabricksError
-from cornflow.shared.const import DATABRICKS_TO_STATE_MAP,DATABRICKS_TERMINATE_STATE, DATABRICKS_FINISH_TO_STATE_MAP
+from cornflow.shared.const import (
+    DATABRICKS_TO_STATE_MAP,
+    DATABRICKS_TERMINATE_STATE,
+    DATABRICKS_FINISH_TO_STATE_MAP,
+)
+

 class Databricks:
     def __init__(self, url, auth_secret, token_endpoint, ep_clusters, client_id):
         self.url = url
-        self.constants=config_orchestrator["databricks"]
-        self.auth_secret=auth_secret
-        self.token_endpoint=token_endpoint
-        self.ep_clusters=ep_clusters
+        self.constants = config_orchestrator["databricks"]
+        self.auth_secret = auth_secret
+        self.token_endpoint = token_endpoint
+        self.ep_clusters = ep_clusters
         self.client_id = client_id
-
+
     @classmethod
     def from_config(cls, config):
         data = dict(
@@ -32,61 +39,68 @@ class Databricks:

     def get_token(self):
         import requests
-        url = f'{self.url}{self.token_endpoint}'
-        data = {
-            "grant_type": "client_credentials",
-            "scope": "all-apis"
-        }
-        auth = (self.client_id,self.auth_secret)
-        oauth_response = requests.post(url,data=data,auth=auth)
+
+        url = f"{self.url}{self.token_endpoint}"
+        data = {"grant_type": "client_credentials", "scope": "all-apis"}
+        auth = (self.client_id, self.auth_secret)
+        oauth_response = requests.post(url, data=data, auth=auth)
         oauth_response.json()
         oauth_token = oauth_response.json()["access_token"]
         return oauth_token
-
+
     def is_alive(self):
         try:
-            # TODO: this url is project specific. Either it has to be a config option or some other way has to be found
-            path="/Workspace/Repos/nippon/nippon_production_scheduling/requirements.txt"
+            # TODO: this url is project specific. Either it has to be a config option or some other way has to be found
+            path = (
+                "/Workspace/Repos/nippon/nippon_production_scheduling/requirements.txt"
+            )
             url = f"{self.url}/api/2.0/workspace/get-status?path={path}"
-            response = self.request_headers_auth(method="GET", url=url)
-            if "error_code" in response.json().keys():
+            response = self.request_headers_auth(method="GET", url=url)
+            if "error_code" in response.json().keys():
                 return False
             return True
-
+
         except Exception as err:
-            current_app.logger.error(f"Error: {err}")
-            return False
-
+            current_app.logger.error(f"Error: {err}")
+            return False
+
     def get_orch_info(self, orch_name, method="GET"):
         """
         Get information about a job in Databricks
         https://docs.databricks.com/api/workspace/jobs/get
         """
         url = f"{self.url}/api/2.1/jobs/get/?job_id={orch_name}"
-        schema_info = self.request_headers_auth(method=method, url=url)
+        schema_info = self.request_headers_auth(method=method, url=url)
         if "error_code" in schema_info.json().keys():
             raise DatabricksError("JOB not available")
         return schema_info
-    # TODO AGA: incluir un id de job por defecto o hacer obligatorio el uso el parámetro.
+
+    # TODO AGA: incluir un id de job por defecto o hacer obligatorio el uso el parámetro.
     # Revisar los efectos secundarios de eliminar execution_id y usar el predeterminado
     def run_workflow(
-        self, execution_id, orch_name=config_orchestrator["databricks"]["def_schema"], checks_only=False, case_id=None
-    ):
+        self,
+        execution_id,
+        orch_name=config_orchestrator["databricks"]["def_schema"],
+        checks_only=False,
+        case_id=None,
+    ):
         """
         Run a job in Databricks
         """
         # TODO AGA: revisar si la url esta bien/si acepta asi los parámetros
         url = f"{self.url}/api/2.1/jobs/run-now/"
-        # TODO AGA: revisar si deben ser notebook parameters o job parameters. 
+        # TODO AGA: revisar si deben ser notebook parameters o job parameters.
         # Entender cómo se usa checks_only
-        payload = dict(job_id=orch_name, notebook_parameters=dict(checks_only=checks_only))
+        payload = dict(
+            job_id=orch_name, notebook_parameters=dict(checks_only=checks_only)
+        )
         return self.request_headers_auth(method="POST", url=url, json=payload)
-
-    def get_run_status(self, run_id):
+
+    def get_run_status(self, schema, run_id):
         """
         Get the status of a run in Databricks
         """
-        print( "asking for run id ", run_id)
+        print("asking for run id ", run_id)
         url = f"{self.url}/api/2.1/jobs/runs/get"
         payload = dict(run_id=run_id)
         info = self.request_headers_auth(method="GET", url=url, json=payload)
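
get_token performs a standard OAuth2 client-credentials exchange against the configured token endpoint, and request_headers_auth attaches the result as a bearer token on every API call. A self-contained sketch of that flow; the host, endpoint path and credentials are placeholders (in cornflow they come from the DATABRICKS_* config options):

    import requests

    base_url = "https://my-workspace.cloud.databricks.com"  # placeholder host
    token_endpoint = "/oidc/v1/token"                       # placeholder path

    resp = requests.post(
        base_url + token_endpoint,
        data={"grant_type": "client_credentials", "scope": "all-apis"},
        auth=("my-client-id", "my-client-secret"),  # placeholder credentials
    )
    token = resp.json()["access_token"]

    # As in request_headers_auth: subsequent calls carry the bearer token.
    headers = {"Authorization": "Bearer " + token}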
@@ -94,18 +108,22 @@ class Databricks:
         print("info is ", info)
         state = info["status"]["state"]
         if state == DATABRICKS_TERMINATE_STATE:
-            if info["status"]["termination_details"]["code"] in DATABRICKS_FINISH_TO_STATE_MAP.keys():
+            if (
+                info["status"]["termination_details"]["code"]
+                in DATABRICKS_FINISH_TO_STATE_MAP.keys()
+            ):
                 return info["status"]["termination_details"]["code"]
-            else:
+            else:
                 return "OTHER_FINISH_ERROR"
         return state
+
     def request_headers_auth(self, status=200, **kwargs):
-        token =self.get_token()
-        def_headers = {"Authorization": "Bearer "+ str(token)}
+        token = self.get_token()
+        def_headers = {"Authorization": "Bearer " + str(token)}
         headers = kwargs.get("headers", def_headers)
         response = requests.request(headers=headers, **kwargs)
         if status is None:
             return response
         if response.status_code != status:
             raise DatabricksError(error=response.text, status_code=response.status_code)
-        return response
+        return response
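
Two things changed in get_run_status: it now takes schema as a leading parameter (matching the updated call site in ExecutionStatusEndpoint, even though the body does not use it yet), and terminal runs are collapsed to their termination code. A sketch of the terminal-state handling, assuming the runs/get payload shape shown in the diff:

    DATABRICKS_TERMINATE_STATE = "TERMINATED"
    # Placeholder mapping; the real one lives in cornflow/shared/const.py.
    DATABRICKS_FINISH_TO_STATE_MAP = {"SUCCESS": 1, "USER_CANCELED": -4}

    def summarize_run(info: dict) -> str:
        """Reduce a Databricks runs/get payload to a coarse state string."""
        state = info["status"]["state"]
        if state == DATABRICKS_TERMINATE_STATE:
            code = info["status"]["termination_details"]["code"]
            return code if code in DATABRICKS_FINISH_TO_STATE_MAP else "OTHER_FINISH_ERROR"
        return state

    run = {"status": {"state": "TERMINATED", "termination_details": {"code": "SUCCESS"}}}
    print(summarize_run(run))  # -> SUCCESS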
cornflow/tests/integration/test_cornflowclient.py CHANGED
@@ -1,6 +1,7 @@
 """

 """
+
 # Full imports
 import json
 import pulp
@@ -279,7 +280,7 @@ class TestCornflowClientOpen(TestCornflowClientBasic):
     def test_server_alive(self):
         data = self.client.is_alive()
         cf_status = data["cornflow_status"]
-        af_status = data["airflow_status"]
+        af_status = data["backend_status"]
         self.assertEqual(str, type(cf_status))
         self.assertEqual(str, type(af_status))
         self.assertEqual(cf_status, STATUS_HEALTHY)
cornflow/tests/unit/test_executions.py CHANGED
@@ -62,13 +62,13 @@ class TestExecutionsListEndpoint(BaseTestCases.ListFilters):
     def test_new_execution(self):
         self.create_new_row(self.url, self.model, payload=self.payload)

-    @patch("cornflow.endpoints.execution.Airflow")
+    @patch("cornflow.endpoints.execution_databricks.Airflow")
     def test_new_execution_run(self, af_client_class):
         patch_af_client(af_client_class)

         self.create_new_row(EXECUTION_URL, self.model, payload=self.payload)

-    @patch("cornflow.endpoints.execution.Airflow")
+    @patch("cornflow.endpoints.execution_databricks.Airflow")
     def test_new_execution_bad_config(self, af_client_class):
         patch_af_client(af_client_class)
         response = self.create_new_row(
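
Every @patch target in this file moves from cornflow.endpoints.execution to cornflow.endpoints.execution_databricks, following the usual mock.patch rule: patch the name where it is looked up, not where it is defined. A minimal sketch of the pattern outside the test class; the suite's patch_af_client helper configures the mock along these lines:

    from unittest.mock import patch

    # The endpoint module does `from cornflow_client.airflow.api import Airflow`,
    # so the name must be patched in that module's namespace.
    with patch("cornflow.endpoints.execution_databricks.Airflow") as af_client_class:
        af_client_class.from_config.return_value.is_alive.return_value = True
        # ... exercise the endpoint; it now talks to the mocked client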
@@ -81,7 +81,7 @@ class TestExecutionsListEndpoint(BaseTestCases.ListFilters):
         self.assertIn("error", response)
         self.assertIn("jsonschema_errors", response)

-    @patch("cornflow.endpoints.execution.Airflow")
+    @patch("cornflow.endpoints.execution_databricks.Airflow")
     def test_new_execution_partial_config(self, af_client_class):
         patch_af_client(af_client_class)
         self.payload["config"].pop("solver")
@@ -91,7 +91,7 @@ class TestExecutionsListEndpoint(BaseTestCases.ListFilters):
         self.assertIn("solver", response["config"])
         self.assertEqual(response["config"]["solver"], "cbc")

-    @patch("cornflow.endpoints.execution.Airflow")
+    @patch("cornflow.endpoints.execution_databricks.Airflow")
     def test_new_execution_with_solution(self, af_client_class):
         patch_af_client(af_client_class)
         self.payload["data"] = self.solution
@@ -102,7 +102,7 @@ class TestExecutionsListEndpoint(BaseTestCases.ListFilters):
             check_payload=False,
         )

-    @patch("cornflow.endpoints.execution.Airflow")
+    @patch("cornflow.endpoints.execution_databricks.Airflow")
     def test_new_execution_with_solution_bad(self, af_client_class):
         patch_af_client(af_client_class)
         patch_af_client(af_client_class)
@@ -193,7 +193,7 @@ class TestExecutionRelaunchEndpoint(CustomTestCase):
         self.assertEqual(row["config"], self.payload["config"])
         self.assertIsNone(row["checks"])

-    @patch("cornflow.endpoints.execution.Airflow")
+    @patch("cornflow.endpoints.execution_databricks.Airflow")
     def test_relaunch_execution_run(self, af_client_class):
         patch_af_client(af_client_class)

@@ -372,7 +372,7 @@ class TestExecutionsDetailEndpoint(

         self.assertEqual(row.json["checks"], None)

-    @patch("cornflow.endpoints.execution.Airflow")
+    @patch("cornflow.endpoints.execution_databricks.Airflow")
     def test_stop_execution(self, af_client_class):
         patch_af_client(af_client_class)

@@ -479,7 +479,7 @@ class TestExecutionsStatusEndpoint(TestExecutionsDetailEndpointMock):
         self.response_items = {"id", "name", "status"}
         self.items_to_check = []

-    @patch("cornflow.endpoints.execution.Airflow")
+    @patch("cornflow.endpoints.execution_databricks.Airflow")
     def test_get_one_status(self, af_client_class):
         patch_af_client(af_client_class)

@@ -495,7 +495,7 @@ class TestExecutionsStatusEndpoint(TestExecutionsDetailEndpointMock):
         )
         self.assertEqual(data["state"], 1)

-    @patch("cornflow.endpoints.execution.Airflow")
+    @patch("cornflow.endpoints.execution_databricks.Airflow")
     def test_put_one_status(self, af_client_class):
         patch_af_client(af_client_class)

cornflow/tests/unit/test_health.py CHANGED
@@ -28,7 +28,7 @@ class TestHealth(CustomTestCase):
         response = self.client.get(HEALTH_URL)
         self.assertEqual(200, response.status_code)
         cf_status = response.json["cornflow_status"]
-        af_status = response.json["airflow_status"]
+        backend_status = response.json["backend_status"]
         self.assertEqual(str, type(cf_status))
-        self.assertEqual(str, type(af_status))
+        self.assertEqual(str, type(backend_status))
         self.assertEqual(cf_status, STATUS_HEALTHY)
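
The health payload key is renamed from airflow_status to the backend-agnostic backend_status, matching the multi-orchestrator support elsewhere in this release. A sketch of the updated contract; client, HEALTH_URL and STATUS_HEALTHY are the test suite's names:

    response = client.get(HEALTH_URL)
    assert response.status_code == 200
    assert response.json["cornflow_status"] == STATUS_HEALTHY
    assert isinstance(response.json["backend_status"], str)  # renamed key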
cornflow-2.0.0a9.dist-info/METADATA → cornflow-2.0.0a11.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: cornflow
-Version: 2.0.0a9
+Version: 2.0.0a11
 Summary: Cornflow is an open source multi-solver optimization server with a REST API built using flask.
 Home-page: https://github.com/baobabsoluciones/cornflow
 Author: baobab soluciones
@@ -13,7 +13,7 @@ Requires-Python: >=3.9
 Requires-Dist: alembic==1.9.2
 Requires-Dist: apispec<=6.3.0
 Requires-Dist: click<=8.1.7
-Requires-Dist: cornflow-client==2.0.0a6
+Requires-Dist: cornflow-client==2.0.0a10
 Requires-Dist: cryptography<=42.0.5
 Requires-Dist: databricks-sdk==0.29.0
 Requires-Dist: disposable-email-domains>=0.0.86
cornflow-2.0.0a9.dist-info/RECORD → cornflow-2.0.0a11.dist-info/RECORD CHANGED
@@ -6,7 +6,7 @@ airflow_config/plugins/XCom/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMp
 airflow_config/plugins/XCom/gce_xcom_backend.py,sha256=vCGvF2jbfZt5bOv-pk5Q_kUR6LomFUojIymimSJmj3o,1795
 cornflow/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cornflow/app.py,sha256=X73N64o8OGEqVIRWbC13e_4xb1lxzOH_BV3F3fzAmXE,7312
-cornflow/config.py,sha256=gcKhxmsC3UIbdl42SnrB8vOtrL8_7QCGj5yyYlmSOsI,6124
+cornflow/config.py,sha256=XRO0moHFw3JSIUsww8Id0JCj7JYz_2Bq7SMVfvyTkQ8,6126
 cornflow/gunicorn.py,sha256=uO-Yk7w7nvQSWh12iDxsVvlG-_2BiKIIjm2UiTk4P9E,480
 cornflow/orchestrator_constants.py,sha256=VO6EGcHhovH6nFpp3QGY_orYVJbjFrMavmemL-Gx_Vs,269
 cornflow/cli/__init__.py,sha256=5jBmSMpaE1S9rDaQjS8VHJ6x4FfJG8MhKzMzfw7G4Zc,743
@@ -17,7 +17,7 @@ cornflow/cli/migrations.py,sha256=Stc8H99rG8vgo3yRJcck11zBY_EA4WqyVybglfl8zJE,16
 cornflow/cli/permissions.py,sha256=4KXKysH4g8YYQIZcPuXFS2g0xEErp-e8I_FAqMGaV7U,1006
 cornflow/cli/roles.py,sha256=NFG__qrlyOT0h4L4nwo9FSV4DKjGtMVh3gwiJxwM37w,411
 cornflow/cli/schemas.py,sha256=sxuJOZf12SBZAXDiAYNPB-n9LSxzSwkB3xyhgS_4K9A,6086
-cornflow/cli/service.py,sha256=lCFOQXtBMYOm8sjntuyLhY_TshFOMMnEtysFy6zrgc8,10817
+cornflow/cli/service.py,sha256=h44X7U9QHdwHbACBzBNIkmCEhGYpE8QF9l3p3mKgbMU,10857
 cornflow/cli/users.py,sha256=nPnu8rQNLtwmeXLwYtJ_hjlsa_24XOnQLgBJRBP9bJw,2104
 cornflow/cli/utils.py,sha256=0tF41gTt6LL9XGOizTQg2GXuOXbqLg6gapCr-HWjJ0Q,733
 cornflow/cli/views.py,sha256=Xyx2l-Sm7panxQEfR3qksCIUoqF7woMKsYgZALkxUXM,636
@@ -33,7 +33,7 @@ cornflow/commands/access.py,sha256=NTZJFF9la8TDuMcD_ISQtJTj-wtM2p1dddokQJHtkj0,7
 cornflow/commands/actions.py,sha256=4AwgAmyI6VeaugkISvTlNGrIzMMU_-ZB3MhwDD_CIEA,1544
 cornflow/commands/cleanup.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cornflow/commands/dag.py,sha256=4c74pqtAg9y_qw_mOSQU1WULwdd9PnV0SmVqbY4T-uw,3721
-cornflow/commands/permissions.py,sha256=TtKJcX219RecAytm8PCdVctaVLZSA7ilhBlPnWxYuFQ,6934
+cornflow/commands/permissions.py,sha256=yvMB3kE2rrqRj-eAh53E0j8Zgk84rz8ZT438ylauHnk,7017
 cornflow/commands/roles.py,sha256=Oux-UkswkQ74zqaMEJYIEsZpQZGBcGaSahVzx9feAHU,1516
 cornflow/commands/schemas.py,sha256=QjLXLw5So3f8ZqTg5_uvXxwpo4vE0dMT4_gFMKZHGvQ,1828
 cornflow/commands/users.py,sha256=MEfqMm2ujso0NQgdUm-crOet-G0M43GNqVCx2Ls-2HY,2591
@@ -47,7 +47,7 @@ cornflow/endpoints/dag.py,sha256=x_F9gGc1_9-zoAErPGPFBhwQKqIgGEoCjDVMmRKgZRg,103
 cornflow/endpoints/data_check.py,sha256=FrsBCzwXD_E5qCYkXt0Byc_UPC0APBSxbj8EriPVKxI,16520
 cornflow/endpoints/example_data.py,sha256=e3Y_lZTKVQTkk_pzOnmd-VisuKo7kE-7IqhhLHV-6yw,4374
 cornflow/endpoints/execution.py,sha256=2tX4UJ23e-phAFfgRpjFASr54OWIROuA6a9Uum2Cg48,28010
-cornflow/endpoints/execution_databricks.py,sha256=-QiBp6sRWh-XD2DRe4cDUTiOrUNfqfWGdSQncT5_sik,34912
+cornflow/endpoints/execution_databricks.py,sha256=vsnvOgY5bBD5wDWsoF-z08DuVkKIG2Toz8g1GPTjBBs,33430
 cornflow/endpoints/health.py,sha256=K1l8YK7t5nfVRi0WXMn124l8-ezvYAYqHlCtitLU9AY,2558
 cornflow/endpoints/instance.py,sha256=YuB0TTs32eKFxd2GJc7WeVTVWZdv7tqNAtWLrOPsuXo,11618
 cornflow/endpoints/licenses.py,sha256=82hHWGYvVIiyw9mlwGtMwJMDJ-ShHOi9rvuM6KvfE4U,873
@@ -88,7 +88,7 @@ cornflow/models/base_data_model.py,sha256=mVMHJpEoJeH6Wly_ZIfzLfTPd39nSYpCgmtA_f
 cornflow/models/case.py,sha256=GEs-xeo0bJ5qJETDnIur-2q2IyR3NSj1K0jP3Arz4Xs,9572
 cornflow/models/dag.py,sha256=a3X_WSuLj9nNv-c2qziQb9RSOULprTNbWm4G3k5jGAo,3021
 cornflow/models/dag_permissions.py,sha256=LM2CacGyflwYbG8fbjRaaUry7pQDtvOXjfJpY9jj5EQ,1718
-cornflow/models/execution.py,sha256=63wECEMQKehozS0Fnh6-bxnwe2BFCZ0qLDljnRbVxTI,6140
+cornflow/models/execution.py,sha256=4zAnrkYyIJ4v0ulCgMDGyeyE8zmB8wc0PT8hL_HyagE,6098
 cornflow/models/instance.py,sha256=2E9kBKv1a8soaEAvG8X4qXQ4BVC-IWYD5WQcPmZQw00,3979
 cornflow/models/main_alarms.py,sha256=9S-Ohr2kYFFWB0HomrpSdDIoUr85Eu1rt90Om_Pa8VY,1748
 cornflow/models/meta_models.py,sha256=qeliGdpw0_q0GCeZzansF-09Ay5pueaT-QQPVPZ5aj4,12000
@@ -121,8 +121,8 @@ cornflow/schemas/user_role.py,sha256=e5y6RgdZZtLqD-h2B3sa5WokI5-pT78tWw85IG34I74
 cornflow/schemas/view.py,sha256=ctq9Y1TmjrWdyOqgDYeEx7qbbuNLKfSiNOlFTlXmpaw,429
 cornflow/shared/__init__.py,sha256=1ahcBwWOsSjGI4FEm77JBQjitBdBszOncKcEMjzwGYE,29
 cornflow/shared/compress.py,sha256=pohQaGs1xbH8CN6URIH6BAHA--pFq7Hmjz8oI3c3B5c,1347
-cornflow/shared/const.py,sha256=FQQQz-LekFN81YPh1KTHwyLaikddPYqQJHdzTHhvTfw,4034
-cornflow/shared/databricks.py,sha256=DYhF99DJCxAcTQjB3ih4w9qc_yb_3iG1bpx-eerumvE,4731
+cornflow/shared/const.py,sha256=DmeA1DL-9RXkRXgbGI0kykMuCFSuZ0LGFpwl2rjirOI,4029
+cornflow/shared/databricks.py,sha256=k8MSove2FMEsH5gYGgD2d9i0O4ug4v3lAGUGQOyzEp4,4847
 cornflow/shared/email.py,sha256=QNDDMv86LZObkevSCyUbLQeR2UD3zWScPIr82NDzYHQ,3437
 cornflow/shared/exceptions.py,sha256=bD_Eo1T2baSrgfGMKb6r9GZ1fCJbitykEEMy9wOnk4A,7033
 cornflow/shared/licenses.py,sha256=Lc71Jw2NxVTFWtoXdQ9wJX_o3BDfYg1xVoehDXvnCkQ,1328
@@ -141,7 +141,7 @@ cornflow/tests/custom_liveServer.py,sha256=I_0YNrcKIwVmRov3zCQMWwcCWkMe5V246Hpa4
 cornflow/tests/custom_test_case.py,sha256=X1j-cy9QKhF4W6_7jcJsTm-0Jn6lluq6gj-g126dFpQ,35945
 cornflow/tests/integration/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cornflow/tests/integration/test_commands.py,sha256=mGiMfqIqwvRx08Al6LcHXEKPgEQEJ33EoPIZhGcReX0,697
-cornflow/tests/integration/test_cornflowclient.py,sha256=ioAQmQKWW6mXVJhdF4LECZcGIOa_N0xPkFaGWGtxOO8,20963
+cornflow/tests/integration/test_cornflowclient.py,sha256=mwtlzn4MSjQNRhwV3yzhco3DHCS5BVFimTfmB8lB2Cc,20964
 cornflow/tests/ldap/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cornflow/tests/ldap/test_ldap_authentication.py,sha256=6Gu1WkF7MQmcV_10IJkpo2qEloZZ9zjpV18ANDD0HRw,4286
 cornflow/tests/unit/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -155,9 +155,9 @@ cornflow/tests/unit/test_commands.py,sha256=HjeR7vytN_IQhkmKFJw5St4bSEOIXuTeLPuu
 cornflow/tests/unit/test_dags.py,sha256=Q8l0K1FoMI6Yn4tuzxfiZp2azCpIWclbUneQnkOattA,13403
 cornflow/tests/unit/test_data_checks.py,sha256=6s50d1iuRTUcAYn14oEcRS39ZZ6E9ussU4YpkpYhtC4,8612
 cornflow/tests/unit/test_example_data.py,sha256=rCj3wNW4OMHyLfuPdIKxBa7-eRuFsymij-9Nk27_Z3o,4128
-cornflow/tests/unit/test_executions.py,sha256=_hIaiZri7Blyx4DYhBDHh-0peU1HQh66RSPqQJFveE8,17501
+cornflow/tests/unit/test_executions.py,sha256=zSb2NzNpjQKFDX6t5-coqhROHvrevktG2FCbHQkdRKs,17600
 cornflow/tests/unit/test_generate_from_schema.py,sha256=L1EdnASbDJ8SjrX1V4WnUKKwV0sRTwVnNYnxSpyeSeQ,15376
-cornflow/tests/unit/test_health.py,sha256=0E0HXMb63_Z8drbLZdxnJwtTbQyaZS9ZEHut6qsDbh8,1033
+cornflow/tests/unit/test_health.py,sha256=er-CkoYI3Nb9lq0pGn-rIbhfIigxXI5wYNFNn-OsAoc,1043
 cornflow/tests/unit/test_instances.py,sha256=RaD9Tue2HODKThBNhciu6krdIvrauDLxOq4Y6a_z8DU,10573
 cornflow/tests/unit/test_instances_file.py,sha256=zXxSlOM_MMkFvpWNX-iatD40xoIAOGQkinCLf1txb0M,1986
 cornflow/tests/unit/test_licenses.py,sha256=jgnfE4UMFooGn44HK_KspJXIpmLjUpK_WgsBBeTO5eI,1534
@@ -172,8 +172,8 @@ cornflow/tests/unit/test_tables.py,sha256=dY55YgaCkyqwJnqn0LbZHNeXBoL4ZxXWwKkCoT
 cornflow/tests/unit/test_token.py,sha256=OEVPgG8swSMkUbuGJGfGF5Z27utMLICn1eIyma1cM9E,3760
 cornflow/tests/unit/test_users.py,sha256=WfaMcybPpR7rspXyvzHGgw25p751hMPAV0DOp_caSPM,22430
 cornflow/tests/unit/tools.py,sha256=BCAm_KGVgZO-CCb_rkaZlbK4SID_F2ab8FiBJzGwKtc,587
-cornflow-2.0.0a9.dist-info/METADATA,sha256=qRfLB20zSSHf-LfT1RzSGLEy8VwFxFhM75XdiYK3DIQ,9536
-cornflow-2.0.0a9.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
-cornflow-2.0.0a9.dist-info/entry_points.txt,sha256=q9cPKAFBsmHkERCqQ2JcOTM-tVBLHTl-DGxwCXowAWM,46
-cornflow-2.0.0a9.dist-info/top_level.txt,sha256=Qj9kLFJW1PLb-ZV2s_aCkQ-Wi5W6KC6fFR-LTBrx-rU,24
-cornflow-2.0.0a9.dist-info/RECORD,,
+cornflow-2.0.0a11.dist-info/METADATA,sha256=1IXMUU-N5lhB8E_T6v84rXqOZGP9LDcvFFFai-sBH1o,9538
+cornflow-2.0.0a11.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+cornflow-2.0.0a11.dist-info/entry_points.txt,sha256=q9cPKAFBsmHkERCqQ2JcOTM-tVBLHTl-DGxwCXowAWM,46
+cornflow-2.0.0a11.dist-info/top_level.txt,sha256=Qj9kLFJW1PLb-ZV2s_aCkQ-Wi5W6KC6fFR-LTBrx-rU,24
+cornflow-2.0.0a11.dist-info/RECORD,,