dataflow-core 2.1.10__py3-none-any.whl → 2.1.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Note: this release has been flagged as potentially problematic.


This version of dataflow-core might be problematic; see the package registry's advisory page for more details.

dataflow/environment.py CHANGED
@@ -1,5 +1,5 @@
1
1
  import os, shutil, subprocess, datetime, yaml, re
2
- from .models.environment import JobLogs, Environment
2
+ from .models.environment import JobLogs, Environment, LocalEnvironment
3
3
  import json, asyncio, pkg_resources
4
4
  from sqlalchemy.orm import Session
5
5
  from .configuration import ConfigurationManager
@@ -12,6 +12,7 @@ class EnvironmentManager:
12
12
  self.env_base_path = self.config.get_config_value('paths', 'env_path')
13
13
  self.env_logs_path = self.config.get_config_value('paths', 'env_logs_path')
14
14
  self.env_version_path = self.config.get_config_value('paths', 'env_versions_path')
15
+ self.local_env_logs_path = self.config.get_config_value('paths', 'local_env_logs_path')
15
16
  os.makedirs(self.env_version_path, exist_ok=True)
16
17
  self.logger = CustomLogger().get_logger(__name__)
17
18
 
@@ -78,7 +79,7 @@ class EnvironmentManager:
78
79
  self.logger.error(f"Invalid status '{status}' provided for environment creation.")
79
80
  raise ValueError("Invalid status. Use 'draft' or 'published'.")
80
81
 
81
- async def clone_env(self, source_path, env_name, pip_libraries, conda_libraries, user_name, db=None):
82
+ async def clone_env(self, source_path, env_name, pip_libraries, conda_libraries, user_name, db=None, local_clone=False):
82
83
  """
83
84
  Clones an existing conda environment.
84
85
 
@@ -96,7 +97,10 @@ class EnvironmentManager:
96
97
  # Set up logging
97
98
  log_file_location = None
98
99
  if db:
99
- log_file_location = self._setup_logging(env_name, "1", user_name, db)
100
+ if local_clone:
101
+ log_file_location = os.path.join(self.local_env_logs_path, f"{env_name}.log")
102
+ else:
103
+ log_file_location = self._setup_logging(env_name, "1", user_name, db)
100
104
 
101
105
  yaml_path = f"{self.env_version_path}/{env_name}_v1.yaml"
102
106
 
@@ -104,7 +108,7 @@ class EnvironmentManager:
104
108
  clone_status = await self._execute_env_operation(
105
109
  env_name=env_name,
106
110
  status="draft",
107
- mode="clone",
111
+ mode="local_clone" if local_clone else "clone",
108
112
  yaml_file_path=yaml_path,
109
113
  source_path=source_path,
110
114
  log_file_location=log_file_location,
@@ -113,16 +117,22 @@ class EnvironmentManager:
113
117
 
114
118
  # Update job log status if db was provided
115
119
  if db and log_file_location:
116
- log_file_name = os.path.basename(log_file_location)
117
- await self._update_job_status(log_file_name, clone_status, log_file_location, db)
118
- self.update_environment_db(
119
- env_short_name=env_name,
120
- version="1",
121
- pip_libraries=pip_libraries,
122
- conda_libraries=conda_libraries,
123
- status=clone_status,
124
- db=db
125
- )
120
+ if local_clone:
121
+ db.query(LocalEnvironment).filter(
122
+ LocalEnvironment.name == env_name
123
+ ).update({"status": clone_status.title()})
124
+ db.commit()
125
+ else:
126
+ log_file_name = os.path.basename(log_file_location)
127
+ await self._update_job_status(log_file_name, clone_status, log_file_location, db)
128
+ self.update_environment_db(
129
+ env_short_name=env_name,
130
+ version="1",
131
+ pip_libraries=pip_libraries,
132
+ conda_libraries=conda_libraries,
133
+ status=clone_status,
134
+ db=db
135
+ )
126
136
 
127
137
  return clone_status
128
138
 
@@ -198,13 +208,15 @@ class EnvironmentManager:
198
208
  """
199
209
  self.logger.info(f"Executing environment operation: {env_name}, Status: {status}, Mode: {mode}")
200
210
  status = status.lower()
201
- conda_env_path = os.path.join(self.env_base_path, env_name)
211
+ if mode == "local_clone":
212
+ conda_env_path = os.path.join(os.getenv("CONDA_ENVS_PATH"), env_name)
213
+ else:
214
+ os.makedirs(self.env_base_path, exist_ok=True)
215
+ conda_env_path = os.path.join(self.env_base_path, env_name)
202
216
 
203
217
  try:
204
218
  if os.path.exists(conda_env_path) and mode == "create":
205
219
  raise FileExistsError(f"Environment '{env_name}' already exists at {conda_env_path}.")
206
-
207
- os.makedirs(conda_env_path, exist_ok=True)
208
220
 
209
221
  if mode == "create":
210
222
  create_env_script_path = pkg_resources.resource_filename('dataflow', 'scripts/create_environment.sh')
@@ -216,8 +228,13 @@ class EnvironmentManager:
216
228
 
217
229
  elif mode == "clone":
218
230
  clone_env_script_path = pkg_resources.resource_filename('dataflow', 'scripts/clone_environment.sh')
219
- command = ["bash", clone_env_script_path, source_path, conda_env_path]
220
-
231
+ command = ["bash", clone_env_script_path, source_path, conda_env_path, yaml_file_path]
232
+
233
+ elif mode == "local_clone":
234
+ conda_cmd = f"conda create --name {env_name} --clone {source_path} --yes"
235
+ command = ["su", "-", os.environ.get("NB_USER"), "-c", conda_cmd]
236
+ self.logger.info(f"Cloning environment locally with command: {' '.join(command)}")
237
+
221
238
  else:
222
239
  raise ValueError("Invalid mode. Use 'create', 'update', or 'clone'.")
223
240
 
@@ -2,7 +2,8 @@ from sqlalchemy import Column, Integer, String, Boolean, Text, ForeignKey, DateT
2
2
  from sqlalchemy.orm import relationship
3
3
  from sqlalchemy.sql import func
4
4
  from datetime import datetime, timezone
5
- from dataflow.db import Base, Local_Base
5
+ from dataflow.db import Base
6
+ from enum import Enum
6
7
 
7
8
  class EnvironmentAttributes(Base):
8
9
  """
@@ -62,14 +63,20 @@ class JobLogs(Base):
62
63
  created_by = Column(String)
63
64
 
64
65
 
65
- class LocalEnvironment(Local_Base):
66
+ class LocalEnvironment(Base):
66
67
  __tablename__ = "LOCAL_ENVIRONMENT"
67
68
 
68
69
  id = Column(Integer, primary_key=True, autoincrement=True)
69
- name = Column(String, nullable=False)
70
+ name = Column(String, nullable=False, index=True)
71
+ user_name = Column(String, ForeignKey('USER.user_name', ondelete='CASCADE'), nullable=False, index=True)
70
72
  py_version = Column(String)
71
73
  pip_libraries = Column(Text)
72
74
  conda_libraries = Column(Text)
73
75
  status = Column(String, default="Created")
76
+ cloned_from = Column(String, ForeignKey('ENVIRONMENT.short_name', ondelete='SET NULL'), nullable=True)
74
77
  updated_at = Column(DateTime, default=datetime.now(timezone.utc), onupdate=datetime.now(timezone.utc))
75
78
  need_refresh = Column(Boolean, default=False)
79
+
80
+ class EnvType(str, Enum):
81
+ dataflow = "dataflow"
82
+ local = "local"
@@ -3,6 +3,7 @@ set -e
3
3
 
4
4
  source_env_name=$1
5
5
  target_env_path=$2
6
+ yaml_file_path=$3
6
7
 
7
8
  # Extract just the env name (basename) from the target path
8
9
  env_name=$(basename "$target_env_path")
@@ -14,6 +15,6 @@ mkdir -p "$CONDA_PKGS_DIRS"
14
15
  # 1. Cloning conda env
15
16
  conda create --clone ${source_env_name} --prefix ${target_env_path} --yes
16
17
 
17
- conda env export --prefix "$conda_env_path" > "$yaml_file_path"
18
+ conda env export --prefix "$target_env_path" > "$yaml_file_path"
18
19
 
19
20
  echo "Environment Creation Successful"
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: dataflow-core
3
- Version: 2.1.10
3
+ Version: 2.1.11
4
4
  Summary: Dataflow core package
5
5
  Author: Dataflow
6
6
  Author-email:
@@ -7,13 +7,13 @@ dataflow/configuration.py,sha256=7To6XwH1eESiYp39eqPcswXWwrdBUdPF6xN6WnazOF0,663
7
7
  dataflow/database_manager.py,sha256=tJHMuOZ9Muskrh9t4uLRlTuFU0VkHAzoHlGP5DORIC4,899
8
8
  dataflow/dataflow.py,sha256=A66OFXgJCxW18Dk_BnasvR82LrPXQ2iB-QMTyYJJ1cc,13100
9
9
  dataflow/db.py,sha256=73ojGqpCTRVTlPszD73Ozhjih_BI2KTHmazqxxL6iWk,3780
10
- dataflow/environment.py,sha256=05F-dBRyZu2mr26vuiuJU13pfgtRpA9yW69bk83HGUw,27239
10
+ dataflow/environment.py,sha256=qiyuRRPpVLVWiYccRHXnGyWMr_ZBPWzixAyDtAzxQYE,28277
11
11
  dataflow/models/__init__.py,sha256=WnlLd-0T3HYtJloDms1a58lN66WzBIbBSTReDXD6HaQ,892
12
12
  dataflow/models/app_types.py,sha256=yE_ZB13lhpK7AZ7PyBwnQlf0RlIHYs_-vdMKx7_RMlY,379
13
13
  dataflow/models/blacklist_library.py,sha256=B2oi3Z8GcR_glhLAyinFk0W8c9txXvm3uOER6dY-q7I,991
14
14
  dataflow/models/connection.py,sha256=_VJL3KuIrm8t4lJmtunIL3-AXF9Yvi5wUolzdR3tE0E,1017
15
15
  dataflow/models/dataflow_zone.py,sha256=yFCvQXos5M1cU7ksbVSO338_RkT3hkdw2wr3kCJ_rec,769
16
- dataflow/models/environment.py,sha256=300CUwx7d7bjadrMsTch7NSRYLCuf9AK9WVzlT26CAc,2616
16
+ dataflow/models/environment.py,sha256=3W-Pvkuiufjw0MWy9F6uWSd0HDPjRNNJe3TnhpHxcGg,2920
17
17
  dataflow/models/environment_status.py,sha256=lvPDNUsUoTW9D97B07aKqJQHRKp4LvPM28pQDMPH1ac,536
18
18
  dataflow/models/git_ssh.py,sha256=W15SDypxzGOz_aZkHEnVZ6DIMVsjAsbSIXVIEt2mPYU,694
19
19
  dataflow/models/pinned_projects.py,sha256=I-XMQq7__XJJi2lyOdEvQEfhPRz8D6KHA6Cbavbf05o,606
@@ -34,7 +34,7 @@ dataflow/schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,
34
34
  dataflow/schemas/connection.py,sha256=1bKTSEPW1QwcFQe35QCIr-S5DTMr34WJCjSKhxMpPek,2449
35
35
  dataflow/schemas/git_ssh.py,sha256=N1O7HM6ZbygIBZn2rKvNR0e7IM3ZJMAH6aJtjaghDr0,1283
36
36
  dataflow/schemas/secret.py,sha256=dAN_2IBTjnjf4sT6cyPVd_Zcdaw8s_EMG4aodMoCwXU,1165
37
- dataflow/scripts/clone_environment.sh,sha256=xWJBw9z1W1rztrzLXYro3UtEdFuBSqNrB83y45zqFfE,487
37
+ dataflow/scripts/clone_environment.sh,sha256=Qy0GylsA3kUVUL_L1MirxIWujOFhT1tikKqXNtCTWd4,506
38
38
  dataflow/scripts/create_environment.sh,sha256=3FHgNplJuEZvyTsLqlCJNX9oyfXgsfqn80VZk2xtvso,828
39
39
  dataflow/scripts/update_environment.sh,sha256=2dtn2xlNi6frpig-sqlGE1_IKRbbkqYOCpf_qyMKKII,992
40
40
  dataflow/secrets_manager/__init__.py,sha256=idGqIDtYl0De2WIK9Obl-N7SDPSYtVM0D-wXfZjCiy4,559
@@ -48,8 +48,8 @@ dataflow/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
48
48
  dataflow/utils/exceptions.py,sha256=8GRFoYZ5dPGQckVm2znaHpPi0ZAs69fK-RGKukEsapk,4432
49
49
  dataflow/utils/get_current_user.py,sha256=4nSO3SPVMZhW-MsIgxR3f9ZzrFaIZIuyrM6hvfyE7PQ,1202
50
50
  dataflow/utils/logger.py,sha256=7BFrOq5Oiqn8P4XZbgJzMP5O07d2fpdECbbfsjrUuHw,1213
51
- dataflow_core-2.1.10.dist-info/METADATA,sha256=kzjEwzhHHXfO29gQPh5S30jzY4SZRwEySKXD7RZmKDs,370
52
- dataflow_core-2.1.10.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
53
- dataflow_core-2.1.10.dist-info/entry_points.txt,sha256=ppj_EIbYrJJwCPg1kfdsZk5q1N-Ejfis1neYrnjhO8o,117
54
- dataflow_core-2.1.10.dist-info/top_level.txt,sha256=SZsUOpSCK9ntUy-3Tusxzf5A2e8ebwD8vouPb1dPt_8,23
55
- dataflow_core-2.1.10.dist-info/RECORD,,
51
+ dataflow_core-2.1.11.dist-info/METADATA,sha256=5ascZ5HFZFObYNT9mPFD1jrOrxoeFhUbyZX3kkjydzI,370
52
+ dataflow_core-2.1.11.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
53
+ dataflow_core-2.1.11.dist-info/entry_points.txt,sha256=ppj_EIbYrJJwCPg1kfdsZk5q1N-Ejfis1neYrnjhO8o,117
54
+ dataflow_core-2.1.11.dist-info/top_level.txt,sha256=SZsUOpSCK9ntUy-3Tusxzf5A2e8ebwD8vouPb1dPt_8,23
55
+ dataflow_core-2.1.11.dist-info/RECORD,,