cornflow 2.0.0a9__tar.gz → 2.0.0a10__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (186)
  1. {cornflow-2.0.0a9/cornflow.egg-info → cornflow-2.0.0a10}/PKG-INFO +1 -1
  2. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/cli/service.py +10 -4
  3. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/commands/permissions.py +19 -10
  4. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/config.py +1 -1
  5. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/endpoints/execution_databricks.py +75 -96
  6. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/models/execution.py +0 -1
  7. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/shared/const.py +2 -2
  8. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/shared/databricks.py +54 -36
  9. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/tests/integration/test_cornflowclient.py +2 -1
  10. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/tests/unit/test_executions.py +9 -9
  11. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/tests/unit/test_health.py +2 -2
  12. {cornflow-2.0.0a9 → cornflow-2.0.0a10/cornflow.egg-info}/PKG-INFO +1 -1
  13. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/setup.py +1 -1
  14. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/MANIFEST.in +0 -0
  15. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/README.rst +0 -0
  16. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/airflow_config/__init__.py +0 -0
  17. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/airflow_config/airflow_local_settings.py +0 -0
  18. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/airflow_config/plugins/XCom/__init__.py +0 -0
  19. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/airflow_config/plugins/XCom/gce_xcom_backend.py +0 -0
  20. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/airflow_config/plugins/__init__.py +0 -0
  21. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/airflow_config/webserver_ldap.py +0 -0
  22. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/__init__.py +0 -0
  23. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/app.py +0 -0
  24. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/cli/__init__.py +0 -0
  25. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/cli/actions.py +0 -0
  26. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/cli/arguments.py +0 -0
  27. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/cli/config.py +0 -0
  28. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/cli/migrations.py +0 -0
  29. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/cli/permissions.py +0 -0
  30. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/cli/roles.py +0 -0
  31. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/cli/schemas.py +0 -0
  32. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/cli/tools/__init__.py +0 -0
  33. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/cli/tools/api_generator.py +0 -0
  34. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/cli/tools/endpoint_tools.py +0 -0
  35. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/cli/tools/models_tools.py +0 -0
  36. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/cli/tools/schema_generator.py +0 -0
  37. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/cli/tools/schemas_tools.py +0 -0
  38. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/cli/tools/tools.py +0 -0
  39. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/cli/users.py +0 -0
  40. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/cli/utils.py +0 -0
  41. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/cli/views.py +0 -0
  42. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/commands/__init__.py +0 -0
  43. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/commands/access.py +0 -0
  44. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/commands/actions.py +0 -0
  45. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/commands/cleanup.py +0 -0
  46. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/commands/dag.py +0 -0
  47. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/commands/roles.py +0 -0
  48. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/commands/schemas.py +0 -0
  49. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/commands/users.py +0 -0
  50. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/commands/views.py +0 -0
  51. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/endpoints/__init__.py +0 -0
  52. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/endpoints/action.py +0 -0
  53. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/endpoints/alarms.py +0 -0
  54. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/endpoints/apiview.py +0 -0
  55. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/endpoints/case.py +0 -0
  56. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/endpoints/dag.py +0 -0
  57. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/endpoints/data_check.py +0 -0
  58. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/endpoints/example_data.py +0 -0
  59. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/endpoints/execution.py +0 -0
  60. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/endpoints/health.py +0 -0
  61. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/endpoints/instance.py +0 -0
  62. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/endpoints/licenses.py +0 -0
  63. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/endpoints/login.py +0 -0
  64. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/endpoints/main_alarms.py +0 -0
  65. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/endpoints/meta_resource.py +0 -0
  66. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/endpoints/permission.py +0 -0
  67. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/endpoints/roles.py +0 -0
  68. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/endpoints/schemas.py +0 -0
  69. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/endpoints/signup.py +0 -0
  70. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/endpoints/tables.py +0 -0
  71. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/endpoints/token.py +0 -0
  72. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/endpoints/user.py +0 -0
  73. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/endpoints/user_role.py +0 -0
  74. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/gunicorn.py +0 -0
  75. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/migrations/README +0 -0
  76. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/migrations/alembic.ini +0 -0
  77. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/migrations/env.py +0 -0
  78. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/migrations/script.py.mako +0 -0
  79. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/migrations/versions/00757b557b02_.py +0 -0
  80. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/migrations/versions/1af47a419bbd_.py +0 -0
  81. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/migrations/versions/4aac5e0c6e66_.py +0 -0
  82. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/migrations/versions/7c3ea5ab5501_.py +0 -0
  83. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/migrations/versions/991b98e24225_.py +0 -0
  84. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/migrations/versions/a472b5ad50b7_.py +0 -0
  85. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/migrations/versions/c2db9409cb5f_.py +0 -0
  86. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/migrations/versions/c8a6c762e818_.py +0 -0
  87. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/migrations/versions/ca449af8034c_.py +0 -0
  88. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/migrations/versions/d0e0700dcd8e_.py +0 -0
  89. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/migrations/versions/d1b5be1f0549_.py +0 -0
  90. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/migrations/versions/e1a50dae1ac9_.py +0 -0
  91. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/migrations/versions/e937a5234ce4_.py +0 -0
  92. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/migrations/versions/ebdd955fcc5e_.py +0 -0
  93. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/migrations/versions/f3bee20314a2_.py +0 -0
  94. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/models/__init__.py +0 -0
  95. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/models/action.py +0 -0
  96. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/models/alarms.py +0 -0
  97. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/models/base_data_model.py +0 -0
  98. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/models/case.py +0 -0
  99. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/models/dag.py +0 -0
  100. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/models/dag_permissions.py +0 -0
  101. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/models/instance.py +0 -0
  102. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/models/main_alarms.py +0 -0
  103. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/models/meta_models.py +0 -0
  104. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/models/permissions.py +0 -0
  105. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/models/role.py +0 -0
  106. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/models/user.py +0 -0
  107. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/models/user_role.py +0 -0
  108. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/models/view.py +0 -0
  109. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/orchestrator_constants.py +0 -0
  110. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/schemas/__init__.py +0 -0
  111. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/schemas/action.py +0 -0
  112. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/schemas/alarms.py +0 -0
  113. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/schemas/case.py +0 -0
  114. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/schemas/common.py +0 -0
  115. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/schemas/dag.py +0 -0
  116. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/schemas/example_data.py +0 -0
  117. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/schemas/execution.py +0 -0
  118. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/schemas/health.py +0 -0
  119. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/schemas/instance.py +0 -0
  120. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/schemas/main_alarms.py +0 -0
  121. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/schemas/model_json.py +0 -0
  122. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/schemas/patch.py +0 -0
  123. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/schemas/permissions.py +0 -0
  124. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/schemas/query.py +0 -0
  125. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/schemas/role.py +0 -0
  126. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/schemas/schemas.py +0 -0
  127. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/schemas/solution_log.py +0 -0
  128. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/schemas/tables.py +0 -0
  129. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/schemas/user.py +0 -0
  130. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/schemas/user_role.py +0 -0
  131. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/schemas/view.py +0 -0
  132. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/shared/__init__.py +0 -0
  133. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/shared/authentication/__init__.py +0 -0
  134. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/shared/authentication/auth.py +0 -0
  135. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/shared/authentication/decorators.py +0 -0
  136. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/shared/authentication/ldap.py +0 -0
  137. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/shared/compress.py +0 -0
  138. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/shared/email.py +0 -0
  139. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/shared/exceptions.py +0 -0
  140. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/shared/licenses.py +0 -0
  141. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/shared/log_config.py +0 -0
  142. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/shared/query_tools.py +0 -0
  143. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/shared/utils.py +0 -0
  144. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/shared/utils_tables.py +0 -0
  145. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/shared/validators.py +0 -0
  146. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/tests/__init__.py +0 -0
  147. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/tests/const.py +0 -0
  148. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/tests/custom_liveServer.py +0 -0
  149. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/tests/custom_test_case.py +0 -0
  150. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/tests/integration/__init__.py +0 -0
  151. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/tests/integration/test_commands.py +0 -0
  152. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/tests/ldap/__init__.py +0 -0
  153. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/tests/ldap/test_ldap_authentication.py +0 -0
  154. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/tests/unit/__init__.py +0 -0
  155. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/tests/unit/test_actions.py +0 -0
  156. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/tests/unit/test_alarms.py +0 -0
  157. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/tests/unit/test_apiview.py +0 -0
  158. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/tests/unit/test_application.py +0 -0
  159. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/tests/unit/test_cases.py +0 -0
  160. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/tests/unit/test_cli.py +0 -0
  161. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/tests/unit/test_commands.py +0 -0
  162. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/tests/unit/test_dags.py +0 -0
  163. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/tests/unit/test_data_checks.py +0 -0
  164. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/tests/unit/test_example_data.py +0 -0
  165. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/tests/unit/test_generate_from_schema.py +0 -0
  166. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/tests/unit/test_instances.py +0 -0
  167. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/tests/unit/test_instances_file.py +0 -0
  168. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/tests/unit/test_licenses.py +0 -0
  169. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/tests/unit/test_log_in.py +0 -0
  170. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/tests/unit/test_main_alarms.py +0 -0
  171. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/tests/unit/test_permissions.py +0 -0
  172. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/tests/unit/test_roles.py +0 -0
  173. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/tests/unit/test_schema_from_models.py +0 -0
  174. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/tests/unit/test_schemas.py +0 -0
  175. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/tests/unit/test_sign_up.py +0 -0
  176. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/tests/unit/test_tables.py +0 -0
  177. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/tests/unit/test_token.py +0 -0
  178. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/tests/unit/test_users.py +0 -0
  179. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow/tests/unit/tools.py +0 -0
  180. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow.egg-info/SOURCES.txt +0 -0
  181. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow.egg-info/dependency_links.txt +0 -0
  182. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow.egg-info/entry_points.txt +0 -0
  183. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow.egg-info/requires.txt +0 -0
  184. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/cornflow.egg-info/top_level.txt +0 -0
  185. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/requirements.txt +0 -0
  186. {cornflow-2.0.0a9 → cornflow-2.0.0a10}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: cornflow
-Version: 2.0.0a9
+Version: 2.0.0a10
 Summary: Cornflow is an open source multi-solver optimization server with a REST API built using flask.
 Home-page: https://github.com/baobabsoluciones/cornflow
 Author: baobab soluciones
@@ -16,7 +16,13 @@ from cornflow.commands import (
     update_schemas_command,
     update_dag_registry_command,
 )
-from cornflow.shared.const import AUTH_DB, ADMIN_ROLE, DATABRICKS_BACKEND, SERVICE_ROLE, AIRFLOW_BACKEND
+from cornflow.shared.const import (
+    AUTH_DB,
+    ADMIN_ROLE,
+    DATABRICKS_BACKEND,
+    SERVICE_ROLE,
+    AIRFLOW_BACKEND,
+)
 from cornflow.shared import db
 from cryptography.fernet import Fernet
 from flask_migrate import Migrate, upgrade
@@ -106,7 +112,7 @@ def init_cornflow_service():

     # Check LDAP parameters for active directory and show message
     if os.getenv("AUTH_TYPE") == 2:
-        print(
+        click.echo(
             "WARNING: Cornflow will be deployed with LDAP Authorization. Please review your ldap auth configuration."
         )

@@ -129,10 +135,10 @@ def init_cornflow_service():
             f"cat > /etc/logrotate.d/cornflow <<EOF\n {conf} \nEOF", shell=True
         )
         out_logrotate = logrotate.stdout
-        print(out_logrotate)
+        click.echo(out_logrotate)

     except error:
-        print(error)
+        click.echo(error)

     external_application = int(os.getenv("EXTERNAL_APP", 0))
     if external_application == 0:
@@ -14,13 +14,22 @@ from sqlalchemy.exc import DBAPIError, IntegrityError
 def register_base_permissions_command(external_app: str = None, verbose: bool = False):
     if external_app is None:
         from cornflow.endpoints import resources, alarms_resources
+
         resources_to_register = resources
+        extra_permissions = EXTRA_PERMISSION_ASSIGNATION
         if current_app.config["ALARMS_ENDPOINTS"]:
             resources_to_register = resources + alarms_resources
     elif external_app is not None:
         sys.path.append("./")
         external_module = import_module(external_app)
         resources_to_register = external_module.endpoints.resources
+        try:
+            extra_permissions = (
+                EXTRA_PERMISSION_ASSIGNATION
+                + external_module.shared.const.EXTRA_PERMISSION_ASSIGNATION
+            )
+        except AttributeError:
+            extra_permissions = EXTRA_PERMISSION_ASSIGNATION
     else:
         resources_to_register = []
         exit()
@@ -52,7 +61,7 @@ def register_base_permissions_command(external_app: str = None, verbose: bool =
                 "api_view_id": views_in_db[endpoint],
             }
         )
-        for role, action, endpoint in EXTRA_PERMISSION_ASSIGNATION
+        for role, action, endpoint in extra_permissions
     ]

     permissions_in_app_keys = [
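The comprehension above unpacks three-element tuples, so an external app that contributes permissions through its shared.const module needs an EXTRA_PERMISSION_ASSIGNATION with the same (role id, action id, endpoint name) shape. A hypothetical sketch of such a constant; the specific role/action constant names and the endpoint name are assumptions for illustration only, not part of this release:

    # external_app/shared/const.py (hypothetical sketch)
    # Each entry is (role id, action id, endpoint name), matching the unpacking
    # "for role, action, endpoint in extra_permissions" in the command above.
    from cornflow.shared.const import VIEWER_ROLE, GET_ACTION  # assumed constant names

    EXTRA_PERMISSION_ASSIGNATION = [
        (VIEWER_ROLE, GET_ACTION, "my-external-endpoint"),  # illustrative entry only
    ]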
@@ -124,7 +133,7 @@ def register_base_permissions_command(external_app: str = None, verbose: bool =
 def register_dag_permissions_command(
     open_deployment: int = None, verbose: bool = False
 ):
-    click.echo(f"Checkpoint 1")
+
     from flask import current_app
     from sqlalchemy.exc import DBAPIError, IntegrityError

@@ -138,7 +147,7 @@ def register_dag_permissions_command(
         (permission.dag_id, permission.user_id)
         for permission in PermissionsDAG.get_all_objects()
     ]
-    click.echo(f"Checkpoint 2")
+
     try:
         db.session.commit()
     except DBAPIError as e:
@@ -149,14 +158,13 @@ def register_dag_permissions_command(
     all_dags = DeployedOrch.get_all_objects().all()

     if open_deployment == 1:
-        click.echo(f"Checkpoint 3")
+
         permissions = [
             PermissionsDAG({"dag_id": dag.id, "user_id": user.id})
             for user in all_users
             for dag in all_dags
             if (dag.id, user.id) not in existing_permissions
         ]
-        click.echo(f"Checkpoint 4")

     else:
         permissions = [
@@ -165,10 +173,10 @@ def register_dag_permissions_command(
             for dag in all_dags
             if (dag.id, user.id) not in existing_permissions and user.is_service_user()
         ]
-        click.echo(f"Checkpoint 5")
+
     if len(permissions) > 1:
         db.session.bulk_save_objects(permissions)
-    click.echo(f"Checkpoint 6")
+
     try:
         db.session.commit()
     except IntegrityError as e:
@@ -177,7 +185,7 @@ def register_dag_permissions_command(
     except DBAPIError as e:
         db.session.rollback()
         current_app.logger.error(f"Unknown error on dag permissions register: {e}")
-    click.echo(f"Checkpoint 7")
+
     if "postgres" in str(db.session.get_bind()):
         db.engine.execute(
             "SELECT setval(pg_get_serial_sequence('permission_dag', 'id'), MAX(id)) FROM permission_dag;"
@@ -190,11 +198,12 @@ def register_dag_permissions_command(
             current_app.logger.error(
                 f"Unknown error on dag permissions sequence updating: {e}"
             )
-    click.echo(f"Checkpoint 7")
+
     if verbose:
+        click.echo(f"DAG permissions registered")
         if len(permissions) > 1:
             current_app.logger.info(f"DAG permissions registered: {permissions}")
         else:
             current_app.logger.info("No new DAG permissions")
-    click.echo(f"Checkpoint 8")
+
     pass
@@ -41,7 +41,7 @@ class DefaultConfig(object):
     DATABRICKS_EP_CLUSTERS = os.getenv("DATABRICKS_EP_CLUSTERS")
     DATABRICKS_CLIENT_ID = os.getenv("DATABRICKS_CLIENT_ID")

-    # If service user is allow to log with username and password
+    # If service user is allowed to log with username and password
     SERVICE_USER_ALLOW_PASSWORD_LOGIN = int(
         os.getenv("SERVICE_USER_ALLOW_PASSWORD_LOGIN", 1)
     )
@@ -5,28 +5,17 @@ These endpoints hve different access url, but manage the same data entities
 """

 # Import from libraries
-import datetime
-import logging
-import time
-from databricks.sdk import WorkspaceClient
-import databricks.sdk.service.jobs as j
-from cornflow.shared.const import (
-    AIRFLOW_BACKEND,
-    DATABRICKS_BACKEND,
-    STATUS_HEALTHY,
-    STATUS_UNHEALTHY,
-)
-# TODO AGA: Modificar import para sacarlo de cornflow_client
-from cornflow.shared.databricks import Databricks
-from cornflow_client.constants import INSTANCE_SCHEMA, CONFIG_SCHEMA, SOLUTION_SCHEMA
 from cornflow_client.airflow.api import Airflow
-# TODO AGA: Porqué el import no funcina correctamente
+from cornflow_client.constants import INSTANCE_SCHEMA, CONFIG_SCHEMA, SOLUTION_SCHEMA
+
+# TODO AGA: Porqué el import no funcina correctamente
 from flask import request, current_app
 from flask_apispec import marshal_with, use_kwargs, doc

 # Import from internal modules
 from cornflow.endpoints.meta_resource import BaseMetaResource
 from cornflow.models import InstanceModel, DeployedOrch, ExecutionModel
+from cornflow.orchestrator_constants import config_orchestrator
 from cornflow.schemas.execution import (
     ExecutionDetailsEndpointResponse,
     ExecutionDetailsEndpointWithIndicatorsResponse,
@@ -38,10 +27,14 @@ from cornflow.schemas.execution import (
     ExecutionEditRequest,
     QueryFiltersExecution,
     ReLaunchExecutionRequest,
-    ExecutionDetailsWithIndicatorsAndLogResponse
+    ExecutionDetailsWithIndicatorsAndLogResponse,
 )
 from cornflow.shared.authentication import Auth, authenticate
 from cornflow.shared.compress import compressed
+from cornflow.shared.const import (
+    AIRFLOW_BACKEND,
+    DATABRICKS_BACKEND,
+)
 from cornflow.shared.const import (
     EXEC_STATE_RUNNING,
     EXEC_STATE_ERROR,
@@ -51,16 +44,23 @@ from cornflow.shared.const import (
     EXECUTION_STATE_MESSAGE_DICT,
     AIRFLOW_TO_STATE_MAP,
     DATABRICKS_TO_STATE_MAP,
-    DATABRICKS_FINISH_TO_STATE_MAP,
     EXEC_STATE_STOPPED,
     EXEC_STATE_QUEUED,
 )
-from cornflow.shared.exceptions import AirflowError, DatabricksError, ObjectDoesNotExist, InvalidData
+
+# TODO AGA: Modificar import para sacarlo de cornflow_client
+from cornflow.shared.databricks import Databricks
+from cornflow.shared.exceptions import (
+    AirflowError,
+    DatabricksError,
+    ObjectDoesNotExist,
+    InvalidData,
+    EndpointNotImplemented,
+)
 from cornflow.shared.validators import (
     json_schema_validate_as_string,
     json_schema_extend_and_validate_as_string,
 )
-from cornflow.orchestrator_constants import config_orchestrator


 class ExecutionEndpoint(BaseMetaResource):
@@ -158,13 +158,13 @@ class ExecutionEndpoint(BaseMetaResource):
         # TODO: should the schema field be cross validated with the instance schema field?

         ORQ_TYPE = current_app.config["CORNFLOW_BACKEND"]
-        if ORQ_TYPE==AIRFLOW_BACKEND:
-            orq_const= config_orchestrator["airflow"]
-            ORQ_ERROR=AirflowError
-        elif ORQ_TYPE==DATABRICKS_BACKEND:
-            orq_const= config_orchestrator["databricks"]
+        if ORQ_TYPE == AIRFLOW_BACKEND:
+            orq_const = config_orchestrator["airflow"]
+            ORQ_ERROR = AirflowError
+        elif ORQ_TYPE == DATABRICKS_BACKEND:
+            orq_const = config_orchestrator["databricks"]
             # TODO AGA: Revisar si esto funcionaría correctamente
-            ORQ_ERROR=DatabricksError
+            ORQ_ERROR = DatabricksError

         if "schema" not in kwargs:
             kwargs["schema"] = orq_const["def_schema"]
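The same backend selection appears again in the relaunch and status endpoints further down. For reference only, a standalone sketch of that pattern, assuming nothing beyond the names already present in this file (config_orchestrator, the AIRFLOW_BACKEND/DATABRICKS_BACKEND constants, and the orchestrator error classes); this helper does not exist in the release and is purely illustrative:

    # Sketch only: the per-backend selection the execution endpoints repeat
    # before talking to the orchestrator.
    def select_orchestrator(orq_type):
        if orq_type == AIRFLOW_BACKEND:
            return config_orchestrator["airflow"], AirflowError
        if orq_type == DATABRICKS_BACKEND:
            return config_orchestrator["databricks"], DatabricksError
        raise EndpointNotImplemented()

    orq_const, ORQ_ERROR = select_orchestrator(current_app.config["CORNFLOW_BACKEND"])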
@@ -188,13 +188,15 @@ class ExecutionEndpoint(BaseMetaResource):
         # We try to create an orch client
         # Note schema is a string with the name of the job/dag
         schema = execution.schema
-        # If we are dealing with DataBricks, the schema will
+        # If we are dealing with DataBricks, the schema will
         # be the job id
-        orch_client, schema_info, execution= get_orch_client(schema,ORQ_TYPE,execution)
+        orch_client, schema_info, execution = get_orch_client(
+            schema, ORQ_TYPE, execution
+        )
         # endregion

         # region VALIDACIONES
-        # We check if the job/dag exists
+        # We check if the job/dag exists
         orch_client.get_orch_info(schema)
         # Validate config before running the dag
         config_schema = DeployedOrch.get_one_schema(config, schema, CONFIG_SCHEMA)
@@ -248,8 +250,8 @@ class ExecutionEndpoint(BaseMetaResource):
             execution.update_log_txt(f"{solution_errors}")
             raise InvalidData(payload=dict(jsonschema_errors=solution_errors))
         # endregion
-
-        if ORQ_TYPE==AIRFLOW_BACKEND:
+
+        if ORQ_TYPE == AIRFLOW_BACKEND:
             info = schema_info.json()
             if info["is_paused"]:
                 err = "The dag exists but it is paused in airflow"
@@ -267,7 +269,7 @@ class ExecutionEndpoint(BaseMetaResource):
         # TODO AGA: revisar si hay que hacer alguna verificación a los JOBS

         try:
-            # TODO AGA: Hay que genestionar la posible eliminación de execution.id como
+            # TODO AGA: Hay que genestionar la posible eliminación de execution.id como
             # parámetro, ya que no se puede seleccionar el id en databricks
             # revisar las consecuencias que puede tener
             response = orch_client.run_workflow(execution.id, orch_name=schema)
@@ -318,13 +320,13 @@ class ExecutionRelaunchEndpoint(BaseMetaResource):
         :rtype: Tuple(dict, integer)
         """
         ORQ_TYPE = current_app.config["CORNFLOW_BACKEND"]
-        if ORQ_TYPE==AIRFLOW_BACKEND:
-            orq_const= config_orchestrator["airflow"]
-            ORQ_ERROR=AirflowError
-        elif ORQ_TYPE==DATABRICKS_BACKEND:
-            orq_const= config_orchestrator["databricks"]
+        if ORQ_TYPE == AIRFLOW_BACKEND:
+            orq_const = config_orchestrator["airflow"]
+            ORQ_ERROR = AirflowError
+        elif ORQ_TYPE == DATABRICKS_BACKEND:
+            orq_const = config_orchestrator["databricks"]
             # TODO AGA: Revisar si esto funcionaría correctamente
-            ORQ_ERROR=DatabricksError
+            ORQ_ERROR = DatabricksError

         config = current_app.config
         if "schema" not in kwargs:
@@ -369,10 +371,12 @@ class ExecutionRelaunchEndpoint(BaseMetaResource):
             log_txt=f"Error while user {self.get_user()} tries to relaunch execution {idx}. "
             f"Configuration data does not match the jsonschema.",
         )
-        orch_client, schema_info, execution = get_orch_client(schema,ORQ_TYPE,execution)
-
+        orch_client, schema_info, execution = get_orch_client(
+            kwargs["schema"], ORQ_TYPE, execution
+        )
+
         if not orch_client.is_alive():
-            err = orq_const["name"]+" is not accessible"
+            err = orq_const["name"] + " is not accessible"
             current_app.logger.error(err)
             execution.update_state(EXEC_STATE_ERROR_START)
             raise ORQ_ERROR(
@@ -384,13 +388,13 @@ class ExecutionRelaunchEndpoint(BaseMetaResource):
                 log_txt=f"Error while user {self.get_user()} tries to relaunch execution {idx}. "
                 + err,
             )
-
+
         # ask airflow if dag_name exists
         schema = execution.schema
         schema_info = orch_client.get_orch_info(schema)

         info = schema_info.json()
-        if ORQ_TYPE==AIRFLOW_BACKEND:
+        if ORQ_TYPE == AIRFLOW_BACKEND:
             if info["is_paused"]:
                 err = "The dag exists but it is paused in airflow"
                 current_app.logger.error(err)
@@ -408,7 +412,7 @@ class ExecutionRelaunchEndpoint(BaseMetaResource):
         try:
             response = orch_client.run_workflow(execution.id, orch_name=schema)
         except ORQ_ERROR as err:
-            error = orq_const["name"]+" responded with an error: {}".format(err)
+            error = orq_const["name"] + " responded with an error: {}".format(err)
             current_app.logger.error(error)
             execution.update_state(EXEC_STATE_ERROR)
             raise ORQ_ERROR(
@@ -436,6 +440,7 @@ class ExecutionDetailsEndpointBase(BaseMetaResource):
     """
     Endpoint used to get the information of a certain execution. But not the data!
     """
+
     # TODO AGA DUDA: Se usa? Qué debería devolver?
     def __init__(self):
         super().__init__()
@@ -564,13 +569,13 @@ class ExecutionStatusEndpoint(BaseMetaResource):
         :rtype: Tuple(dict, integer)
         """
         ORQ_TYPE = current_app.config["CORNFLOW_BACKEND"]
-        if ORQ_TYPE==AIRFLOW_BACKEND:
-            orq_const= config_orchestrator["airflow"]
-            ORQ_ERROR=AirflowError
-        elif ORQ_TYPE==DATABRICKS_BACKEND:
-            orq_const= config_orchestrator["databricks"]
+        if ORQ_TYPE == AIRFLOW_BACKEND:
+            orq_const = config_orchestrator["airflow"]
+            ORQ_ERROR = AirflowError
+        elif ORQ_TYPE == DATABRICKS_BACKEND:
+            orq_const = config_orchestrator["databricks"]
             # TODO AGA: Revisar si esto funcionaría correctamente
-            ORQ_ERROR=DatabricksError
+            ORQ_ERROR = DatabricksError
         execution = self.data_model.get_one_object(user=self.get_user(), idx=idx)
         if execution is None:
             raise ObjectDoesNotExist(
@@ -593,6 +598,7 @@ class ExecutionStatusEndpoint(BaseMetaResource):
             raise ORQ_ERROR(
                 error=error, payload=dict(message=message, state=state), log_txt=log_txt
             )
+
         print("The execution is ", execution)
         print("The execution user is ", self.get_user())
         print("The execution id is ", idx)
@@ -610,10 +616,12 @@ class ExecutionStatusEndpoint(BaseMetaResource):
         )
         schema = execution.schema
         # TODO AGA: Revisar si merece la pena hacer una funcion que solo
-        orch_client, schema_info, execution= get_orch_client(schema ,ORQ_TYPE,execution)
+        orch_client, schema_info, execution = get_orch_client(
+            schema, ORQ_TYPE, execution
+        )

         if not orch_client.is_alive():
-            err = orq_const["name"] +" is not accessible"
+            err = orq_const["name"] + " is not accessible"
             _raise_af_error(
                 execution,
                 err,
@@ -623,11 +631,9 @@ class ExecutionStatusEndpoint(BaseMetaResource):

         try:
             # TODO: get the dag_name from somewhere!
-            state = orch_client.get_run_status(
-                dag_run_id
-            )
+            state = orch_client.get_run_status(schema, dag_run_id)
         except ORQ_ERROR as err:
-            error = orq_const["name"] +f" responded with an error: {err}"
+            error = orq_const["name"] + f" responded with an error: {err}"
             _raise_af_error(
                 execution,
                 error,
@@ -722,38 +728,9 @@ class ExecutionLogEndpoint(ExecutionDetailsEndpointBase):
         current_app.logger.info(f"User {self.get_user()} gets log of execution {idx}")
         return self.get_detail(user=self.get_user(), idx=idx)

+
 # region aux_functions
-def submit_one_job(cid):
-    # trigger one-time-run job and get waiter object
-    waiter = w.jobs.submit(run_name=f'cornflow-job-{time.time()}', tasks=[
-        j.SubmitTask(
-            task_key='nippon_production_scheduling',
-            existing_cluster_id=cid,
-            libraries=[],
-            spark_python_task=j.SparkPythonTask(
-                python_file='/Workspace/Repos/nippon/nippon_production_scheduling/main.py',
-            ),
-            timeout_seconds=0,
-        )
-    ])
-    logging.info(f'starting to poll: {waiter.run_id}')
-    # callback, that receives a polled entity between state updates
-    # If you want to perform polling in a separate thread, process, or service,
-    # you can use w.jobs.wait_get_run_job_terminated_or_skipped(
-    #   run_id=waiter.run_id,
-    #   timeout=datetime.timedelta(minutes=15),
-    #   callback=print_status) to achieve the same results.
-    #
-    # Waiter interface allows for `w.jobs.submit(..).result()` simplicity in
-    # the scenarios, where you need to block the calling thread for the job to finish.
-    run = waiter.result(timeout=datetime.timedelta(minutes=15),
-                        callback=print_status)
-    logging.info(f'job finished: {run.run_page_url}')
-    return waiter.run_id
-
-def print_status(run: j.Run):
-    statuses = [f'{t.task_key}: {t.state.life_cycle_state}' for t in run.tasks]
-    logging.info(f'workflow intermediate status: {", ".join(statuses)}')
+


 def get_orch_client(schema, orq_type, execution):
@@ -762,10 +739,10 @@ def get_orch_client(schema, orq_type, execution):
     if orq_type == AIRFLOW_BACKEND:
         return get_airflow(schema, execution=execution)
     elif orq_type == DATABRICKS_BACKEND:
-        return get_databricks(schema,execution=execution)
+        return get_databricks(schema, execution=execution)
     else:
         raise EndpointNotImplemented()
-
+

 def get_airflow(schema, execution):
     """
@@ -783,12 +760,12 @@ def get_airflow(schema, execution):
                 message=EXECUTION_STATE_MESSAGE_DICT[EXEC_STATE_ERROR_START],
                 state=EXEC_STATE_ERROR_START,
             ),
-            log_txt=f"Error while user {self.get_user()} tries to create an execution "
+            log_txt=f"Error while user {execution.user_id} tries to create an execution "
             + err,
         )
-    # TODO AGA: revisar si tiene sentido que se devuelva execution o si
+    # TODO AGA: revisar si tiene sentido que se devuelva execution o si
     # es un puntero
-    return af_client,schema_info, execution
+    return af_client, schema_info, execution


 def get_databricks(schema, execution):
@@ -807,21 +784,23 @@ def get_databricks(schema, execution):
                 message=EXECUTION_STATE_MESSAGE_DICT[EXEC_STATE_ERROR_START],
                 state=EXEC_STATE_ERROR_START,
             ),
-            log_txt=f"Error while user {self.get_user()} tries to create an execution "
+            log_txt=f"Error while user {execution.user_id} tries to create an execution "
             + err,
         )
     return db_client, schema_info, execution
 # endregion

-def map_run_state(state,ORQ_TYPE):
+
+def map_run_state(state, ORQ_TYPE):
     """
     Maps the state of the execution in the orchestrator to the state of the execution in cornflow
     """
-    if ORQ_TYPE==AIRFLOW_BACKEND:
+    if ORQ_TYPE == AIRFLOW_BACKEND:
+        state = state.json()["state"]
         return AIRFLOW_TO_STATE_MAP.get(state, EXEC_STATE_UNKNOWN)
-    elif ORQ_TYPE==DATABRICKS_BACKEND:
+    elif ORQ_TYPE == DATABRICKS_BACKEND:
         print("The state is ", state)
-        preliminar_state = DATABRICKS_TO_STATE_MAP.get(state,EXEC_STATE_UNKNOWN)
+        preliminar_state = DATABRICKS_TO_STATE_MAP.get(state, EXEC_STATE_UNKNOWN)
         # print("The preliminar state is ", preliminar_state)
         # if preliminar_state =="TERMINATED":
         #     # TODO AGA DUDA: Revisar si es correcto el error predeterminado
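For orientation, a hedged usage sketch of map_run_state as it reads above: the Airflow branch expects the raw status response and pulls the state out of its JSON body, while the Databricks branch maps the state string through DATABRICKS_TO_STATE_MAP. The variable names (orch_client, schema, dag_run_id, execution) follow the ones used earlier in this file; the snippet is illustrative, not code shipped in the package:

    # Illustrative only: translating an orchestrator status into a cornflow state.
    # For Airflow, get_run_status returns a response object and map_run_state reads
    # its .json()["state"]; for Databricks the value is expected to be the state string.
    raw_status = orch_client.get_run_status(schema, dag_run_id)
    state = map_run_state(raw_status, ORQ_TYPE)
    execution.update_state(state)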
@@ -117,7 +117,6 @@ class ExecutionModel(BaseDataModel):
         :param str message: Message for the error
         :return: nothing
         """
-        print("Updating state to ", code)
         self.state = code
         if message is None:
             self.state_message = EXECUTION_STATE_MESSAGE_DICT[code]
@@ -1,5 +1,5 @@
 """
-In this files we import the values for different constants on cornflow server
+In this file we import the values for different constants on cornflow server
 """

 # CORNFLOW BACKEND
@@ -62,7 +62,7 @@ DATABRICKS_TO_STATE_MAP = dict(
 DATABRICKS_FINISH_TO_STATE_MAP = dict(
     SUCCESS=EXEC_STATE_CORRECT,
     USER_CANCELED=EXEC_STATE_STOPPED,
-    )
+)

 DATABRICKS_TERMINATE_STATE = "TERMINATED"
 # These codes and names are inherited from flask app builder in order to have the same names and values