dataflow-core 2.1.10__tar.gz → 2.1.11__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of dataflow-core might be problematic. Click here for more details.

Files changed (60)
  1. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/PKG-INFO +1 -1
  2. dataflow_core-2.1.11/README.md +1 -0
  3. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/environment.py +36 -19
  4. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/models/environment.py +10 -3
  5. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/scripts/clone_environment.sh +2 -1
  6. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow_core.egg-info/PKG-INFO +1 -1
  7. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow_core.egg-info/SOURCES.txt +1 -0
  8. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/setup.py +1 -1
  9. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/authenticator/__init__.py +0 -0
  10. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/authenticator/dataflowairflowauthenticator.py +0 -0
  11. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/authenticator/dataflowhubauthenticator.py +0 -0
  12. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/authenticator/dataflowsupersetauthenticator.py +0 -0
  13. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/__init__.py +0 -0
  14. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/configuration.py +0 -0
  15. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/database_manager.py +0 -0
  16. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/dataflow.py +0 -0
  17. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/db.py +0 -0
  18. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/models/__init__.py +0 -0
  19. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/models/app_types.py +0 -0
  20. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/models/blacklist_library.py +0 -0
  21. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/models/connection.py +0 -0
  22. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/models/dataflow_zone.py +0 -0
  23. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/models/environment_status.py +0 -0
  24. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/models/git_ssh.py +0 -0
  25. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/models/pinned_projects.py +0 -0
  26. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/models/project_details.py +0 -0
  27. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/models/recent_project_studio.py +0 -0
  28. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/models/recent_projects.py +0 -0
  29. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/models/role.py +0 -0
  30. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/models/role_server.py +0 -0
  31. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/models/role_zone.py +0 -0
  32. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/models/server_config.py +0 -0
  33. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/models/session.py +0 -0
  34. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/models/team.py +0 -0
  35. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/models/user.py +0 -0
  36. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/models/user_environment.py +0 -0
  37. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/models/user_team.py +0 -0
  38. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/models/variables.py +0 -0
  39. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/schemas/__init__.py +0 -0
  40. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/schemas/connection.py +0 -0
  41. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/schemas/git_ssh.py +0 -0
  42. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/schemas/secret.py +0 -0
  43. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/scripts/create_environment.sh +0 -0
  44. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/scripts/update_environment.sh +0 -0
  45. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/secrets_manager/__init__.py +0 -0
  46. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/secrets_manager/factory.py +0 -0
  47. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/secrets_manager/interface.py +0 -0
  48. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/secrets_manager/providers/__init__.py +0 -0
  49. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/secrets_manager/providers/aws_manager.py +0 -0
  50. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/secrets_manager/providers/azure_manager.py +0 -0
  51. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/secrets_manager/service.py +0 -0
  52. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/utils/__init__.py +0 -0
  53. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/utils/exceptions.py +0 -0
  54. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/utils/get_current_user.py +0 -0
  55. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow/utils/logger.py +0 -0
  56. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow_core.egg-info/dependency_links.txt +0 -0
  57. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow_core.egg-info/entry_points.txt +0 -0
  58. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow_core.egg-info/requires.txt +0 -0
  59. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/dataflow_core.egg-info/top_level.txt +0 -0
  60. {dataflow_core-2.1.10 → dataflow_core-2.1.11}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: dataflow-core
3
- Version: 2.1.10
3
+ Version: 2.1.11
4
4
  Summary: Dataflow core package
5
5
  Author: Dataflow
6
6
  Author-email:
@@ -0,0 +1 @@
1
+ # Dataflow-Core
@@ -1,5 +1,5 @@
1
1
  import os, shutil, subprocess, datetime, yaml, re
2
- from .models.environment import JobLogs, Environment
2
+ from .models.environment import JobLogs, Environment, LocalEnvironment
3
3
  import json, asyncio, pkg_resources
4
4
  from sqlalchemy.orm import Session
5
5
  from .configuration import ConfigurationManager
@@ -12,6 +12,7 @@ class EnvironmentManager:
12
12
  self.env_base_path = self.config.get_config_value('paths', 'env_path')
13
13
  self.env_logs_path = self.config.get_config_value('paths', 'env_logs_path')
14
14
  self.env_version_path = self.config.get_config_value('paths', 'env_versions_path')
15
+ self.local_env_logs_path = self.config.get_config_value('paths', 'local_env_logs_path')
15
16
  os.makedirs(self.env_version_path, exist_ok=True)
16
17
  self.logger = CustomLogger().get_logger(__name__)
17
18
 
@@ -78,7 +79,7 @@ class EnvironmentManager:
78
79
  self.logger.error(f"Invalid status '{status}' provided for environment creation.")
79
80
  raise ValueError("Invalid status. Use 'draft' or 'published'.")
80
81
 
81
- async def clone_env(self, source_path, env_name, pip_libraries, conda_libraries, user_name, db=None):
82
+ async def clone_env(self, source_path, env_name, pip_libraries, conda_libraries, user_name, db=None, local_clone=False):
82
83
  """
83
84
  Clones an existing conda environment.
84
85
 
@@ -96,7 +97,10 @@ class EnvironmentManager:
96
97
  # Set up logging
97
98
  log_file_location = None
98
99
  if db:
99
- log_file_location = self._setup_logging(env_name, "1", user_name, db)
100
+ if local_clone:
101
+ log_file_location = os.path.join(self.local_env_logs_path, f"{env_name}.log")
102
+ else:
103
+ log_file_location = self._setup_logging(env_name, "1", user_name, db)
100
104
 
101
105
  yaml_path = f"{self.env_version_path}/{env_name}_v1.yaml"
102
106
 
@@ -104,7 +108,7 @@ class EnvironmentManager:
104
108
  clone_status = await self._execute_env_operation(
105
109
  env_name=env_name,
106
110
  status="draft",
107
- mode="clone",
111
+ mode="local_clone" if local_clone else "clone",
108
112
  yaml_file_path=yaml_path,
109
113
  source_path=source_path,
110
114
  log_file_location=log_file_location,
@@ -113,16 +117,22 @@ class EnvironmentManager:
113
117
 
114
118
  # Update job log status if db was provided
115
119
  if db and log_file_location:
116
- log_file_name = os.path.basename(log_file_location)
117
- await self._update_job_status(log_file_name, clone_status, log_file_location, db)
118
- self.update_environment_db(
119
- env_short_name=env_name,
120
- version="1",
121
- pip_libraries=pip_libraries,
122
- conda_libraries=conda_libraries,
123
- status=clone_status,
124
- db=db
125
- )
120
+ if local_clone:
121
+ db.query(LocalEnvironment).filter(
122
+ LocalEnvironment.name == env_name
123
+ ).update({"status": clone_status.title()})
124
+ db.commit()
125
+ else:
126
+ log_file_name = os.path.basename(log_file_location)
127
+ await self._update_job_status(log_file_name, clone_status, log_file_location, db)
128
+ self.update_environment_db(
129
+ env_short_name=env_name,
130
+ version="1",
131
+ pip_libraries=pip_libraries,
132
+ conda_libraries=conda_libraries,
133
+ status=clone_status,
134
+ db=db
135
+ )
126
136
 
127
137
  return clone_status
128
138
 
@@ -198,13 +208,15 @@ class EnvironmentManager:
198
208
  """
199
209
  self.logger.info(f"Executing environment operation: {env_name}, Status: {status}, Mode: {mode}")
200
210
  status = status.lower()
201
- conda_env_path = os.path.join(self.env_base_path, env_name)
211
+ if mode == "local_clone":
212
+ conda_env_path = os.path.join(os.getenv("CONDA_ENVS_PATH"), env_name)
213
+ else:
214
+ os.makedirs(self.env_base_path, exist_ok=True)
215
+ conda_env_path = os.path.join(self.env_base_path, env_name)
202
216
 
203
217
  try:
204
218
  if os.path.exists(conda_env_path) and mode == "create":
205
219
  raise FileExistsError(f"Environment '{env_name}' already exists at {conda_env_path}.")
206
-
207
- os.makedirs(conda_env_path, exist_ok=True)
208
220
 
209
221
  if mode == "create":
210
222
  create_env_script_path = pkg_resources.resource_filename('dataflow', 'scripts/create_environment.sh')
@@ -216,8 +228,13 @@ class EnvironmentManager:
216
228
 
217
229
  elif mode == "clone":
218
230
  clone_env_script_path = pkg_resources.resource_filename('dataflow', 'scripts/clone_environment.sh')
219
- command = ["bash", clone_env_script_path, source_path, conda_env_path]
220
-
231
+ command = ["bash", clone_env_script_path, source_path, conda_env_path, yaml_file_path]
232
+
233
+ elif mode == "local_clone":
234
+ conda_cmd = f"conda create --name {env_name} --clone {source_path} --yes"
235
+ command = ["su", "-", os.environ.get("NB_USER"), "-c", conda_cmd]
236
+ self.logger.info(f"Cloning environment locally with command: {' '.join(command)}")
237
+
221
238
  else:
222
239
  raise ValueError("Invalid mode. Use 'create', 'update', or 'clone'.")
223
240
 
@@ -2,7 +2,8 @@ from sqlalchemy import Column, Integer, String, Boolean, Text, ForeignKey, DateT
2
2
  from sqlalchemy.orm import relationship
3
3
  from sqlalchemy.sql import func
4
4
  from datetime import datetime, timezone
5
- from dataflow.db import Base, Local_Base
5
+ from dataflow.db import Base
6
+ from enum import Enum
6
7
 
7
8
  class EnvironmentAttributes(Base):
8
9
  """
@@ -62,14 +63,20 @@ class JobLogs(Base):
62
63
  created_by = Column(String)
63
64
 
64
65
 
65
- class LocalEnvironment(Local_Base):
66
+ class LocalEnvironment(Base):
66
67
  __tablename__ = "LOCAL_ENVIRONMENT"
67
68
 
68
69
  id = Column(Integer, primary_key=True, autoincrement=True)
69
- name = Column(String, nullable=False)
70
+ name = Column(String, nullable=False, index=True)
71
+ user_name = Column(String, ForeignKey('USER.user_name', ondelete='CASCADE'), nullable=False, index=True)
70
72
  py_version = Column(String)
71
73
  pip_libraries = Column(Text)
72
74
  conda_libraries = Column(Text)
73
75
  status = Column(String, default="Created")
76
+ cloned_from = Column(String, ForeignKey('ENVIRONMENT.short_name', ondelete='SET NULL'), nullable=True)
74
77
  updated_at = Column(DateTime, default=datetime.now(timezone.utc), onupdate=datetime.now(timezone.utc))
75
78
  need_refresh = Column(Boolean, default=False)
79
+
80
+ class EnvType(str, Enum):
81
+ dataflow = "dataflow"
82
+ local = "local"
@@ -3,6 +3,7 @@ set -e
3
3
 
4
4
  source_env_name=$1
5
5
  target_env_path=$2
6
+ yaml_file_path=$3
6
7
 
7
8
  # Extract just the env name (basename) from the target path
8
9
  env_name=$(basename "$target_env_path")
@@ -14,6 +15,6 @@ mkdir -p "$CONDA_PKGS_DIRS"
14
15
  # 1. Cloning conda env
15
16
  conda create --clone ${source_env_name} --prefix ${target_env_path} --yes
16
17
 
17
- conda env export --prefix "$conda_env_path" > "$yaml_file_path"
18
+ conda env export --prefix "$target_env_path" > "$yaml_file_path"
18
19
 
19
20
  echo "Environment Creation Successful"
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: dataflow-core
3
- Version: 2.1.10
3
+ Version: 2.1.11
4
4
  Summary: Dataflow core package
5
5
  Author: Dataflow
6
6
  Author-email:
@@ -1,3 +1,4 @@
1
+ README.md
1
2
  setup.py
2
3
  authenticator/__init__.py
3
4
  authenticator/dataflowairflowauthenticator.py
@@ -14,7 +14,7 @@ class PostInstall(install):
14
14
 
15
15
  setup(
16
16
  name="dataflow-core",
17
- version="2.1.10",
17
+ version="2.1.11",
18
18
  packages=find_packages(include=["dataflow", "dataflow.*", "authenticator", "authenticator.*"]),
19
19
  include_package_data=True,
20
20
  package_data={
File without changes