cornflow 2.0.0a13__py3-none-any.whl → 2.0.0a14__py3-none-any.whl

This diff shows the changes between two publicly released versions of this package, as published to one of the supported registries. The information is provided for informational purposes only and reflects the package contents as they appear in the respective public registries.
Files changed (46)
  1. cornflow/app.py +3 -1
  2. cornflow/cli/__init__.py +4 -0
  3. cornflow/cli/actions.py +4 -0
  4. cornflow/cli/config.py +4 -0
  5. cornflow/cli/migrations.py +13 -8
  6. cornflow/cli/permissions.py +4 -0
  7. cornflow/cli/roles.py +4 -0
  8. cornflow/cli/schemas.py +5 -0
  9. cornflow/cli/service.py +260 -147
  10. cornflow/cli/tools/api_generator.py +13 -10
  11. cornflow/cli/tools/endpoint_tools.py +191 -196
  12. cornflow/cli/tools/models_tools.py +87 -60
  13. cornflow/cli/tools/schema_generator.py +161 -67
  14. cornflow/cli/tools/schemas_tools.py +4 -5
  15. cornflow/cli/users.py +8 -0
  16. cornflow/cli/views.py +4 -0
  17. cornflow/commands/dag.py +3 -2
  18. cornflow/commands/schemas.py +6 -4
  19. cornflow/commands/users.py +12 -17
  20. cornflow/config.py +3 -2
  21. cornflow/endpoints/dag.py +27 -25
  22. cornflow/endpoints/data_check.py +102 -164
  23. cornflow/endpoints/example_data.py +9 -3
  24. cornflow/endpoints/execution.py +27 -23
  25. cornflow/endpoints/health.py +4 -5
  26. cornflow/endpoints/instance.py +39 -12
  27. cornflow/endpoints/meta_resource.py +4 -5
  28. cornflow/shared/airflow.py +157 -0
  29. cornflow/shared/authentication/auth.py +73 -42
  30. cornflow/shared/const.py +9 -0
  31. cornflow/shared/databricks.py +10 -10
  32. cornflow/shared/exceptions.py +3 -1
  33. cornflow/shared/utils_tables.py +36 -8
  34. cornflow/shared/validators.py +1 -1
  35. cornflow/tests/custom_test_case.py +4 -4
  36. cornflow/tests/unit/test_alarms.py +1 -2
  37. cornflow/tests/unit/test_cases.py +4 -7
  38. cornflow/tests/unit/test_executions.py +29 -20
  39. cornflow/tests/unit/test_log_in.py +46 -9
  40. cornflow/tests/unit/test_tables.py +3 -3
  41. cornflow/tests/unit/tools.py +31 -13
  42. {cornflow-2.0.0a13.dist-info → cornflow-2.0.0a14.dist-info}/METADATA +2 -2
  43. {cornflow-2.0.0a13.dist-info → cornflow-2.0.0a14.dist-info}/RECORD +46 -45
  44. {cornflow-2.0.0a13.dist-info → cornflow-2.0.0a14.dist-info}/WHEEL +1 -1
  45. {cornflow-2.0.0a13.dist-info → cornflow-2.0.0a14.dist-info}/entry_points.txt +0 -0
  46. {cornflow-2.0.0a13.dist-info → cornflow-2.0.0a14.dist-info}/top_level.txt +0 -0
@@ -29,40 +29,47 @@ class ModelGenerator:
29
29
  self.table_name = table_name
30
30
  self.app_name = app_name
31
31
 
32
+ def _format_description(self, description_obj, prefix=""):
33
+ """Formats a description object (str, dict, or None) into a string."""
34
+ if description_obj is None or description_obj == "":
35
+ return ""
36
+ if isinstance(description_obj, dict):
37
+ # Assuming 'en' key exists if it's a dict
38
+ desc_text = description_obj.get("en", "")
39
+ else:
40
+ desc_text = str(description_obj)
41
+ return f"{prefix}{desc_text}\n\n" if desc_text else ""
42
+
43
+ def _format_field_description(self, key, val):
44
+ """Formats the description line for a single model field."""
45
+ desc_text = self._format_description(val.get("description")).strip()
46
+ primary_key_text = " The primary key." if key == "id" else ""
47
+ return f' - **{key}**: {val["type"]}.{primary_key_text} {desc_text}\n'
48
+
32
49
  def generate_model_description(self):
33
- res = ' """\n'
34
- res += f" Model class for table {self.table_name} of the application {self.app_name}\n"
35
- res += f' It inherits from :class:`{" and :class:".join(self.parents_class)}`\n\n'
36
- app_description = self.schema.get("description")
37
- if app_description is not None and app_description != "":
38
- if isinstance(app_description, dict):
39
- app_description = app_description["en"]
40
- res += f" Description of the app: {app_description}\n\n"
41
- table_description = self.schema["properties"][self.table_name].get(
42
- "description"
50
+ """Generates the model's docstring description."""
51
+ lines = [
52
+ ' """\n',
53
+ f" Model class for table {self.table_name} of the application {self.app_name}\n",
54
+ f' It inherits from :class:`{" and :class:".join(self.parents_class)}`\n\n',
55
+ ]
56
+
57
+ app_desc = self.schema.get("description")
58
+ lines.append(self._format_description(app_desc, "Description of the app: "))
59
+
60
+ table_desc = self.schema["properties"][self.table_name].get("description")
61
+ lines.append(self._format_description(table_desc, "Description of the table: "))
62
+
63
+ lines.append(
64
+ f" The :class:`{self.class_name}` has the following fields: \n\n"
43
65
  )
44
- if table_description is not None and table_description != "":
45
- if isinstance(table_description, dict):
46
- table_description = table_description["en"]
47
- res += f" Description of the table: {table_description}\n\n"
48
- res += f" The :class:`{self.class_name}` has the following fields: \n\n"
49
- for key, val in self.schema["properties"][self.table_name]["items"][
50
- "properties"
51
- ].items():
52
- if key != "id":
53
- if isinstance(val.get("description"), dict):
54
- res += (
55
- f' - **{key}**: {val["type"]}. {val["description"]["en"]}\n'
56
- )
57
- else:
58
- res += f' - **{key}**: {val["type"]}. {val.get("description") or ""}\n'
59
- else:
60
- if isinstance(val.get("description"), dict):
61
- res += f' - **{key}**: {val["type"]}. The primary key. {val["description"]["en"]}\n'
62
- else:
63
- res += f' - **{key}**: {val["type"]}. The primary key. {val.get("description") or ""}\n'
64
- res += ' """\n'
65
- return res
66
+
67
+ fields = self.schema["properties"][self.table_name]["items"]["properties"]
68
+ for key, val in fields.items():
69
+ lines.append(self._format_field_description(key, val))
70
+
71
+ lines.append(' """\n')
72
+ return "".join(lines)
66
73
 
67
74
  def generate_table_name(self):
68
75
  res = " # Table name in the database\n"
@@ -72,36 +79,51 @@ class ModelGenerator:
72
79
  res += f' __tablename__ = "{self.app_name}_{self.table_name}"\n'
73
80
  return res
74
81
 
82
+ def _generate_field_definition(self, key, val, schema_table):
83
+ """Generates the db.Column definition string for a single field."""
84
+ parts = [f" {key} = db.Column("]
85
+ ty, nullable = get_type(val)
86
+ parts.append(JSON_TYPES_TO_SQLALCHEMY[ty])
87
+
88
+ # Handle foreign key
89
+ if val.get("foreign_key"):
90
+ foreign_table, foreign_prop = val["foreign_key"].split(".")
91
+ if self.app_name is not None:
92
+ foreign_table = self.app_name + "_" + foreign_table
93
+ parts.append(f', db.ForeignKey("{foreign_table}.{foreign_prop}")')
94
+
95
+ # Handle nullability
96
+ is_required = key in schema_table.get("required", [])
97
+ if is_required and not nullable:
98
+ parts.append(", nullable=False")
99
+ else:
100
+ parts.append(", nullable=True")
101
+
102
+ # Handle primary key for 'id' specifically
103
+ if key == "id":
104
+ parts.append(", primary_key=True")
105
+
106
+ parts.append(")")
107
+ return "".join(parts) + "\n"
108
+
75
109
  def generate_model_fields(self):
110
+ """Generates the SQLAlchemy model field definitions."""
76
111
  schema_table = self.schema["properties"][self.table_name]["items"]
112
+ properties = schema_table.get("properties", {})
77
113
 
78
- def has_id(schema):
79
- for prop in schema:
80
- if prop == "id":
81
- return True
82
- return False
83
-
84
- res = " # Model fields\n"
85
- if not has_id(schema_table["properties"]):
86
- res += f" id = db.Column(db.Integer, primary_key=True, autoincrement=True)\n"
87
- for key, val in schema_table["properties"].items():
88
- res += f" {key} = db.Column("
89
- ty, nullable = get_type(val)
90
- res += JSON_TYPES_TO_SQLALCHEMY[ty]
91
- if val.get("foreign_key"):
92
- foreign_table, foreign_prop = val["foreign_key"].split(".")
93
- if self.app_name is not None:
94
- foreign_table = self.app_name + "_" + foreign_table
95
-
96
- res += f', db.ForeignKey("{foreign_table}.{foreign_prop}")'
97
- if key in schema_table["required"] and not nullable:
98
- res += ", nullable=False"
99
- else:
100
- res += ", nullable=True"
101
- if key == "id":
102
- res += ", primary_key=True"
103
- res += ")\n"
104
- return res
114
+ lines = [" # Model fields\n"]
115
+
116
+ # Add default ID if not present in schema
117
+ if "id" not in properties:
118
+ lines.append(
119
+ " id = db.Column(db.Integer, primary_key=True, autoincrement=True)\n"
120
+ )
121
+
122
+ # Generate definition for each field in the schema
123
+ for key, val in properties.items():
124
+ lines.append(self._generate_field_definition(key, val, schema_table))
125
+
126
+ return "".join(lines)
105
127
 
106
128
  def generate_model_init(self):
107
129
  keys = self.schema["properties"][self.table_name]["items"]["properties"].keys()
@@ -130,5 +152,10 @@ class ModelGenerator:
130
152
  res += SP8 + f":return: The representation of the :class:`{self.class_name}`\n"
131
153
  res += SP8 + ":rtype: str\n"
132
154
  res += SP8 + '"""\n'
133
- res += SP8 + f"return self.__repr__()"
155
+ res += SP8 + "return self.__repr__()"
156
+ return res
157
+
158
+ def generate_imports(self):
159
+ # Imports from libraries
160
+ res = model_shared_imports
134
161
  return res
@@ -61,6 +61,8 @@ class SchemaGenerator:
61
61
  for file_path, file_name in files:
62
62
  with open(file_path, "r") as fd:
63
63
  text = fd.read()
64
+ # SonarQube ReDoS FP: Pattern uses .+ but operates on Python source code
65
+ # within a developer CLI tool, making ReDoS risk negligible.
64
66
  parents = re.findall(r"class (.+)\((.+)\):", text)
65
67
  for cl, parent in parents:
66
68
  self.parents[cl] = parent.replace(" ", "")
@@ -72,77 +74,169 @@ class SchemaGenerator:
72
74
 
73
75
  sys.modules["mockedpackage"] = MagicMock()
74
76
 
75
- def parse(self, files):
77
+ def _load_module(self, file_path, file_name):
78
+ """Loads a Python module dynamically from a file path."""
79
+ try:
80
+ spec = importlib.util.spec_from_file_location(file_name, file_path)
81
+ if spec is None or spec.loader is None:
82
+ click.echo(f"Warning: Could not create spec for {file_path}")
83
+ return None
84
+ mod = importlib.util.module_from_spec(spec)
85
+ spec.loader.exec_module(mod)
86
+ return mod
87
+ except Exception as e:
88
+ click.echo(f"Error loading module {file_name}: {e}")
89
+ return None
90
+
91
+ def _process_module(self, mod):
92
+ """Processes a loaded module to find and parse model classes."""
93
+ if mod is None:
94
+ return
95
+
96
+ models_in_mod = SuperDict(mod.__dict__).kfilter(lambda k: k in self.parents)
97
+ for model_name, model_class in models_in_mod.items():
98
+ processed_data = self._process_model(model_name, model_class, mod)
99
+ if processed_data:
100
+ table_name, props = processed_data
101
+ self._process_properties(table_name, props)
102
+
103
+ def _get_model_properties(self, model_name, model_class, mod):
104
+ """Extracts properties (props) from a model class, handling mocks and abstract classes."""
105
+ if isinstance(model_class, MagicMock):
106
+ # Handle mocked models (often from relative imports)
107
+ if not isinstance(mod.__dict__[model_name]._mock_return_value, dict):
108
+ return None
109
+ return mod.__dict__[model_name]._mock_return_value
110
+ elif getattr(model_class, "__abstract__", False):
111
+ # Handle abstract base models
112
+ self.parents[model_name] = None # Mark as a base parent
113
+ return model_class.__dict__
114
+ elif hasattr(model_class, "__table__"):
115
+ # Handle concrete SQLAlchemy models
116
+ self.parents[model_name] = None # Mark as a base parent
117
+ tmp = model_class.__dict__
118
+ props = {"__tablename__": tmp.get("__tablename__")}
119
+ # Extract columns directly from the __table__ object
120
+ for col in model_class.__table__.columns:
121
+ # Use col.key instead of internal __dict__['key']
122
+ # Use col instead of iterating through proxy_set, as col represents the Column object
123
+ props[col.key] = col
124
+ return props
125
+ else:
126
+ # Not a recognized model type
127
+ return None
128
+
129
+ def _process_model(self, model_name, model_class, mod):
130
+ """Processes a single model class to get its properties and initialize schema."""
131
+ props = self._get_model_properties(model_name, model_class, mod)
132
+ if props is None:
133
+ return None
134
+
135
+ table_name = props.get("__tablename__", model_name)
136
+
137
+ # Initialize schema structure for this table
138
+ self.data[table_name] = SuperDict(
139
+ type="array", items=dict(properties=dict(), required=[])
140
+ )
141
+
142
+ # Mark for removal if it's a base class without a table and we want to remove bases
143
+ if not props.get("__tablename__") and not self.leave_bases:
144
+ self.data[table_name]["remove"] = True
145
+
146
+ # Update model/table name mappings
147
+ self.model_table[model_name] = table_name
148
+ self.table_model[table_name] = model_name
149
+
150
+ return table_name, props
151
+
152
+ def _process_properties(self, table_name, props):
153
+ """Iterates through model properties and processes columns."""
76
154
  forget_keys = ["created_at", "updated_at", "deleted_at"]
77
155
  db = SQLAlchemy()
156
+
157
+ for key, val in props.items():
158
+ if key in forget_keys or key.startswith("_"):
159
+ continue
160
+ # Check if it's a SQLAlchemy Column or a proxied Column from __table__
161
+ if isinstance(val, (db.Column, Column)):
162
+ self._process_column(table_name, key, val)
163
+ # Potentially handle other property types here if needed
164
+
165
+ def _process_column(self, table_name, key, column_obj):
166
+ """Processes a single db.Column object to update the JSON schema."""
167
+ db = SQLAlchemy()
168
+ type_converter = {
169
+ db.String: "string",
170
+ TEXT: "string",
171
+ JSON: "object",
172
+ Integer: "integer",
173
+ db.Integer: "integer",
174
+ db.Boolean: "boolean",
175
+ db.SmallInteger: "integer",
176
+ db.Float: "number",
177
+ # Represent dates as strings in JSON schema (format: date)
178
+ db.Date: "string",
179
+ # Represent datetimes as strings (format: date-time)
180
+ db.DateTime: "string",
181
+ # Represent time as string (format: time)
182
+ db.Time: "string",
183
+ # Consider how to represent LargeBinary - maybe string with format 'binary'?
184
+ }
185
+ type_col = "null"
186
+ # Access the column type directly via column_obj.type
187
+ column_type = column_obj.type
188
+ for possible_type_class, repr_type in type_converter.items():
189
+ # Use isinstance for robust type checking
190
+ if isinstance(column_type, possible_type_class):
191
+ type_col = repr_type
192
+ break # Found the type
193
+
194
+ if type_col == "null":
195
+ click.echo(
196
+ f"Warning: Unknown column type '{column_type}' for {table_name}.{key}"
197
+ )
198
+ type_col = "string"
199
+
200
+ self.data[table_name]["items"]["properties"][key] = SuperDict(type=type_col)
201
+
202
+ # Add format for date/time types
203
+ if isinstance(column_type, db.Date):
204
+ self.data[table_name]["items"]["properties"][key]["format"] = "date"
205
+ elif isinstance(column_type, db.DateTime):
206
+ self.data[table_name]["items"]["properties"][key]["format"] = "date-time"
207
+ elif isinstance(column_type, db.Time):
208
+ self.data[table_name]["items"]["properties"][key]["format"] = "time"
209
+
210
+ # Handle foreign keys using column_obj.foreign_keys
211
+ if column_obj.foreign_keys:
212
+ # Assuming only one foreign key per column for simplicity here
213
+ fk = next(iter(column_obj.foreign_keys))
214
+ self.data[table_name]["items"]["properties"][key][
215
+ "foreign_key"
216
+ ] = fk.target_fullname
217
+
218
+ # Handle nullability using column_obj.nullable
219
+ if not column_obj.nullable:
220
+ # Ensure 'required' list exists before appending
221
+ if "required" not in self.data[table_name]["items"]:
222
+ self.data[table_name]["items"]["required"] = []
223
+ if key not in self.data[table_name]["items"]["required"]:
224
+ self.data[table_name]["items"]["required"].append(key)
225
+
226
+ def parse(self, files):
227
+ SQLAlchemy()
78
228
  try:
79
229
  for file_path, file_name in files:
80
- spec = importlib.util.spec_from_file_location(file_name, file_path)
81
- mod = importlib.util.module_from_spec(spec)
82
-
83
- spec.loader.exec_module(mod)
84
-
85
- models = SuperDict(mod.__dict__).kfilter(lambda k: k in self.parents)
86
- for model in models:
87
- if isinstance(models[model], MagicMock):
88
- # Models that inherit from other models that are relatively imported
89
- if not isinstance(mod.__dict__[model]._mock_return_value, dict):
90
- continue
91
- props = mod.__dict__[model]._mock_return_value
92
- elif mod.__dict__[model].__dict__.get("__abstract__"):
93
- # BaseDataModel
94
- props = mod.__dict__[model].__dict__
95
- self.parents[model] = None
96
- else:
97
- # Models that inherit from other models that are imported from libraries
98
- self.parents[model] = None
99
- tmp = mod.__dict__[model].__dict__
100
- props = {"__tablename__": tmp.get("__tablename__")}
101
- for col in tmp["__table__"]._columns:
102
- props[col.__dict__["key"]] = next(iter(col.proxy_set))
103
- table_name = props.get("__tablename__", model)
104
- self.data[table_name] = SuperDict(
105
- type="array", items=dict(properties=dict(), required=[])
106
- )
107
- if not props.get("__tablename__") and not self.leave_bases:
108
- self.data[table_name]["remove"] = True
109
- self.model_table[model] = table_name
110
- self.table_model[table_name] = model
111
- for key, val in props.items():
112
- if key in forget_keys:
113
- continue
114
- elif isinstance(val, db.Column):
115
- type_converter = {
116
- db.String: "string",
117
- TEXT: "string",
118
- JSON: "object",
119
- Integer: "integer",
120
- db.Integer: "integer",
121
- db.Boolean: "boolean",
122
- db.SmallInteger: "integer",
123
- db.Float: "number",
124
- }
125
- type_col = "null"
126
- for possible_type, repr_type in type_converter.items():
127
- if isinstance(val.type, possible_type):
128
- type_col = repr_type
129
- if type_col == "null":
130
- raise Exception("Unknown column type")
131
-
132
- self.data[table_name]["items"]["properties"][
133
- key
134
- ] = SuperDict(type=type_col)
135
- if val.foreign_keys:
136
- fk = list(val.foreign_keys)[0]
137
- self.data[table_name]["items"]["properties"][key][
138
- "foreign_key"
139
- ] = fk._colspec
140
- if not val.nullable:
141
- self.data[table_name]["items"]["required"].append(key)
142
-
143
- db.session.close()
230
+ mod = self._load_module(file_path, file_name)
231
+ if mod:
232
+ self._process_module(mod)
233
+
234
+ # Potential db.session cleanup if it was actually used and persisted
235
+ # If db is only used for type comparison, this might not be needed
236
+ # db.session.close() # Consider if this is necessary
237
+
144
238
  except Exception as err:
145
- click.echo(err)
239
+ click.echo(f"An error occurred during parsing: {err}")
146
240
 
147
241
  def inherit(self):
148
242
  all_classes = set(self.parents.keys())
@@ -180,7 +274,7 @@ class SchemaGenerator:
180
274
 
181
275
  def to_schema(self):
182
276
  return {
183
- "$schema": "http://json-schema.org/schema#",
277
+ "$schema": "https://json-schema.org/schema#",
184
278
  "type": "object",
185
279
  "properties": self.data,
186
280
  "required": list(self.data.keys()),
@@ -35,7 +35,7 @@ class SchemaGenerator:
35
35
  for key, val in self.schema["properties"].items():
36
36
  if key == "id":
37
37
  continue
38
- ty, nullable = get_type(val)
38
+ ty, _ = get_type(val)
39
39
  res += f" {key} = {JSON_TYPES_TO_FIELDS[ty]}("
40
40
  res += "required=False"
41
41
  res += ")\n"
@@ -44,7 +44,7 @@ class SchemaGenerator:
44
44
  def generate_post_schema(self):
45
45
  res = ""
46
46
  for key, val in self.schema["properties"].items():
47
- ty, nullable = get_type(val)
47
+ ty, _ = get_type(val)
48
48
  res += f" {key} = {JSON_TYPES_TO_FIELDS[ty]}("
49
49
  if key in self.schema["required"]:
50
50
  res += "required=True"
@@ -62,9 +62,8 @@ class SchemaGenerator:
62
62
  if not self.schema["properties"].get("id"):
63
63
  return " id = fields.Int(required=True)\n"
64
64
  else:
65
- id_type=self.schema["properties"].get("id")["type"]
66
- return f' id = {JSON_TYPES_TO_FIELDS[id_type]}(required=True)\n'
67
-
65
+ id_type = self.schema["properties"].get("id")["type"]
66
+ return f" id = {JSON_TYPES_TO_FIELDS[id_type]}(required=True)\n"
68
67
 
69
68
  def generate_schema(self):
70
69
  if not self.schema["properties"].get("id"):
cornflow/cli/users.py CHANGED
@@ -14,11 +14,19 @@ from cornflow.shared.exceptions import (
14
14
 
15
15
  @click.group(name="users", help="Commands to manage the users")
16
16
  def users():
17
+ """
18
+ This method is empty but it serves as the building block
19
+ for the rest of the commands
20
+ """
17
21
  pass
18
22
 
19
23
 
20
24
  @click.group(name="create", help="Create a user")
21
25
  def create():
26
+ """
27
+ This method is empty but it serves as the building block
28
+ for the rest of the commands
29
+ """
22
30
  pass
23
31
 
24
32
 
cornflow/cli/views.py CHANGED
@@ -8,6 +8,10 @@ from .utils import get_app
8
8
 
9
9
  @click.group(name="views", help="Commands to manage the views")
10
10
  def views():
11
+ """
12
+ This method is empty but it serves as the building block
13
+ for the rest of the commands
14
+ """
11
15
  pass
12
16
 
13
17
 
cornflow/commands/dag.py CHANGED
@@ -12,6 +12,7 @@ def register_deployed_dags_command(
12
12
  from cornflow_client.airflow.api import Airflow
13
13
  from cornflow.models import DeployedOrch
14
14
  from cornflow.shared import db
15
+ from cornflow.shared.const import AIRFLOW_NOT_REACHABLE_MSG
15
16
 
16
17
  af_client = Airflow(url, user, pwd)
17
18
  max_attempts = 20
@@ -19,12 +20,12 @@ def register_deployed_dags_command(
19
20
  while not af_client.is_alive() and attempts < max_attempts:
20
21
  attempts += 1
21
22
  if verbose:
22
- current_app.logger.info(f"Airflow is not reachable (attempt {attempts})")
23
+ current_app.logger.info(f"{AIRFLOW_NOT_REACHABLE_MSG} (attempt {attempts})")
23
24
  time.sleep(15)
24
25
 
25
26
  if not af_client.is_alive():
26
27
  if verbose:
27
- current_app.logger.info("Airflow is not reachable")
28
+ current_app.logger.info(f"{AIRFLOW_NOT_REACHABLE_MSG}")
28
29
  return False
29
30
 
30
31
  dags_registered = [dag.id for dag in DeployedOrch.get_all_objects()]
@@ -3,6 +3,7 @@ def update_schemas_command(url, user, pwd, verbose: bool = False):
3
3
  from flask import current_app
4
4
 
5
5
  from cornflow_client.airflow.api import Airflow
6
+ from cornflow.shared.const import AIRFLOW_NOT_REACHABLE_MSG
6
7
 
7
8
  af_client = Airflow(url, user, pwd)
8
9
  max_attempts = 20
@@ -10,12 +11,12 @@ def update_schemas_command(url, user, pwd, verbose: bool = False):
10
11
  while not af_client.is_alive() and attempts < max_attempts:
11
12
  attempts += 1
12
13
  if verbose == 1:
13
- current_app.logger.info(f"Airflow is not reachable (attempt {attempts})")
14
+ current_app.logger.info(f"{AIRFLOW_NOT_REACHABLE_MSG} (attempt {attempts})")
14
15
  time.sleep(15)
15
16
 
16
17
  if not af_client.is_alive():
17
18
  if verbose == 1:
18
- current_app.logger.info("Airflow is not reachable")
19
+ current_app.logger.info(f"{AIRFLOW_NOT_REACHABLE_MSG}")
19
20
  return False
20
21
 
21
22
  response = af_client.update_schemas()
@@ -34,6 +35,7 @@ def update_dag_registry_command(url, user, pwd, verbose: bool = False):
34
35
  from flask import current_app
35
36
 
36
37
  from cornflow_client.airflow.api import Airflow
38
+ from cornflow.shared.const import AIRFLOW_NOT_REACHABLE_MSG
37
39
 
38
40
  af_client = Airflow(url, user, pwd)
39
41
  max_attempts = 20
@@ -41,12 +43,12 @@ def update_dag_registry_command(url, user, pwd, verbose: bool = False):
41
43
  while not af_client.is_alive() and attempts < max_attempts:
42
44
  attempts += 1
43
45
  if verbose == 1:
44
- current_app.logger.info(f"Airflow is not reachable (attempt {attempts})")
46
+ current_app.logger.info(f"{AIRFLOW_NOT_REACHABLE_MSG} (attempt {attempts})")
45
47
  time.sleep(15)
46
48
 
47
49
  if not af_client.is_alive():
48
50
  if verbose == 1:
49
- current_app.logger.info("Airflow is not reachable")
51
+ current_app.logger.info(f"{AIRFLOW_NOT_REACHABLE_MSG}")
50
52
  return False
51
53
 
52
54
  response = af_client.update_dag_registry()
@@ -16,19 +16,16 @@ def create_user_with_role(
16
16
  current_app.logger.info(
17
17
  f"User {username} is created and assigned {role_name} role"
18
18
  )
19
- return True
19
+ return
20
20
 
21
21
  user_roles = UserRoleModel.get_all_objects(user_id=user.id)
22
22
  user_actual_roles = [ur.role for ur in user_roles]
23
- if (
24
- user_roles is not None
25
- and RoleModel.get_one_object(role) in user_actual_roles
26
- ):
23
+ if user_roles is not None and RoleModel.get_one_object(role) in user_actual_roles:
27
24
  if verbose:
28
25
  current_app.logger.info(
29
26
  f"User {username} exists and already has {role_name} role assigned"
30
27
  )
31
- return True
28
+ return
32
29
 
33
30
  user_role = UserRoleModel({"user_id": user.id, "role_id": role})
34
31
  user_role.save()
@@ -36,7 +33,6 @@ def create_user_with_role(
36
33
  current_app.logger.info(
37
34
  f"User {username} already exists and is assigned a {role_name} role"
38
35
  )
39
- return True
40
36
 
41
37
 
42
38
  def create_service_user_command(username, email, password, verbose: bool = True):
@@ -45,8 +41,9 @@ def create_service_user_command(username, email, password, verbose: bool = True)
45
41
 
46
42
  if username is None or email is None or password is None:
47
43
  current_app.logger.info("Missing required arguments")
48
- return False
49
- return create_user_with_role(
44
+ return
45
+
46
+ create_user_with_role(
50
47
  username, email, password, "serviceuser", SERVICE_ROLE, verbose
51
48
  )
52
49
 
@@ -57,10 +54,9 @@ def create_admin_user_command(username, email, password, verbose: bool = True):
57
54
 
58
55
  if username is None or email is None or password is None:
59
56
  current_app.logger.info("Missing required arguments")
60
- return False
61
- return create_user_with_role(
62
- username, email, password, "admin", ADMIN_ROLE, verbose
63
- )
57
+ return
58
+
59
+ create_user_with_role(username, email, password, "admin", ADMIN_ROLE, verbose)
64
60
 
65
61
 
66
62
  def create_planner_user_command(username, email, password, verbose: bool = True):
@@ -69,7 +65,6 @@ def create_planner_user_command(username, email, password, verbose: bool = True)
69
65
 
70
66
  if username is None or email is None or password is None:
71
67
  current_app.logger.info("Missing required arguments")
72
- return False
73
- return create_user_with_role(
74
- username, email, password, "planner", PLANNER_ROLE, verbose
75
- )
68
+ return
69
+
70
+ create_user_with_role(username, email, password, "planner", PLANNER_ROLE, verbose)
cornflow/config.py CHANGED
@@ -41,7 +41,7 @@ class DefaultConfig(object):
41
41
  DATABRICKS_TOKEN_ENDPOINT = os.getenv("DATABRICKS_TOKEN_ENDPOINT")
42
42
  DATABRICKS_EP_CLUSTERS = os.getenv("DATABRICKS_EP_CLUSTERS")
43
43
  DATABRICKS_CLIENT_ID = os.getenv("DATABRICKS_CLIENT_ID")
44
-
44
+ DATABRICKS_HEALTH_PATH = os.getenv("DATABRICKS_HEALTH_PATH", "default path")
45
45
  # If service user is allowed to log with username and password
46
46
  SERVICE_USER_ALLOW_PASSWORD_LOGIN = int(
47
47
  os.getenv("SERVICE_USER_ALLOW_PASSWORD_LOGIN", 1)
@@ -134,6 +134,7 @@ class Testing(DefaultConfig):
134
134
  OPEN_DEPLOYMENT = 1
135
135
  LOG_LEVEL = int(os.getenv("LOG_LEVEL", 10))
136
136
 
137
+
137
138
  class TestingDatabricks(Testing):
138
139
  CORNFLOW_BACKEND = DATABRICKS_BACKEND
139
140
 
@@ -176,5 +177,5 @@ app_config = {
176
177
  "production": Production,
177
178
  "testing-oauth": TestingOpenAuth,
178
179
  "testing-root": TestingApplicationRoot,
179
- "testing-databricks" : TestingDatabricks
180
+ "testing-databricks": TestingDatabricks,
180
181
  }