tdfs4ds 0.2.4.41__py3-none-any.whl → 0.2.5.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,8 @@
1
1
  import teradataml as tdml
2
2
  import tdfs4ds
3
3
  from tdfs4ds.utils.query_management import execute_query,execute_query_wrapper
4
+ from tdfs4ds import logger_safe
5
+
4
6
 
5
7
  def upgrade_process_catalog():
6
8
 
@@ -43,13 +45,13 @@ def upgrade_process_catalog():
43
45
  # Step 4: Rename the new table to the old table's name
44
46
  query_4 = f"""RENAME TABLE {tdfs4ds.SCHEMA}.{tdfs4ds.PROCESS_CATALOG_NAME}_NEW TO {tdfs4ds.SCHEMA}.{tdfs4ds.PROCESS_CATALOG_NAME};"""
45
47
 
46
- print('creation of the ', f"{tdfs4ds.SCHEMA}.{tdfs4ds.PROCESS_CATALOG_NAME}_NEW","table" )
48
+ logger_safe('info', f'creation of the {tdfs4ds.SCHEMA}.{tdfs4ds.PROCESS_CATALOG_NAME}_NEW table')
47
49
  tdml.execute_sql(query_1)
48
- print('insert existing processes from',f"{tdfs4ds.SCHEMA}.{tdfs4ds.PROCESS_CATALOG_NAME}", 'to',f"{tdfs4ds.SCHEMA}.{tdfs4ds.PROCESS_CATALOG_NAME}_NEW")
50
+ logger_safe('info', f'insert existing processes from {tdfs4ds.SCHEMA}.{tdfs4ds.PROCESS_CATALOG_NAME} to {tdfs4ds.SCHEMA}.{tdfs4ds.PROCESS_CATALOG_NAME}_NEW')
49
51
  tdml.execute_sql(query_2)
50
- print('rename ',f"{tdfs4ds.SCHEMA}.{tdfs4ds.PROCESS_CATALOG_NAME}",'to',f"{tdfs4ds.SCHEMA}.{tdfs4ds.PROCESS_CATALOG_NAME}_OLD")
52
+ logger_safe('info', f'rename {tdfs4ds.SCHEMA}.{tdfs4ds.PROCESS_CATALOG_NAME} to {tdfs4ds.SCHEMA}.{tdfs4ds.PROCESS_CATALOG_NAME}_OLD')
51
53
  tdml.execute_sql(query_3)
52
- print('rename ,',f"{tdfs4ds.SCHEMA}.{tdfs4ds.PROCESS_CATALOG_NAME}_NEW",'to',f"{tdfs4ds.SCHEMA}.{tdfs4ds.PROCESS_CATALOG_NAME}")
54
+ logger_safe('info', f'rename {tdfs4ds.SCHEMA}.{tdfs4ds.PROCESS_CATALOG_NAME}_NEW to {tdfs4ds.SCHEMA}.{tdfs4ds.PROCESS_CATALOG_NAME}')
53
55
  tdml.execute_sql(query_4)
54
56
 
55
57
  @execute_query_wrapper
@@ -263,79 +265,130 @@ def process_store_catalog_creation(if_exists='replace', comment='this table is a
263
265
  execute_query(query)
264
266
  if tdml.display.print_sqlmr_query:
265
267
  print(query)
266
- if tdfs4ds.DISPLAY_LOGS: print(f'TABLE {tdfs4ds.SCHEMA}.{tdfs4ds.PROCESS_CATALOG_NAME} has been created')
268
+ logger_safe('info', f'TABLE {tdfs4ds.SCHEMA}.{tdfs4ds.PROCESS_CATALOG_NAME} has been created')
267
269
  execute_query(query3)
268
270
  except Exception as e:
269
271
  # If the table already exists and if_exists is set to 'replace', drop the table and recreate it
270
- if tdfs4ds.DISPLAY_LOGS: print(str(e).split('\n')[0])
272
+ logger_safe('error', str(e).split('\n')[0])
271
273
  if str(e).split('\n')[0].endswith('already exists.') and (if_exists == 'replace'):
272
274
  execute_query(f'DROP TABLE {tdfs4ds.SCHEMA}.{tdfs4ds.PROCESS_CATALOG_NAME}')
273
- print(f'TABLE {tdfs4ds.SCHEMA}.{tdfs4ds.PROCESS_CATALOG_NAME} has been dropped')
275
+ logger_safe('info', f'TABLE {tdfs4ds.SCHEMA}.{tdfs4ds.PROCESS_CATALOG_NAME} has been dropped')
274
276
  try:
275
277
  # Attempt to recreate the table after dropping it
276
278
  execute_query(query)
277
- if tdfs4ds.DISPLAY_LOGS: print(f'TABLE {tdfs4ds.SCHEMA}.{tdfs4ds.PROCESS_CATALOG_NAME} has been re-created')
279
+ logger_safe('info', f'TABLE {tdfs4ds.SCHEMA}.{tdfs4ds.PROCESS_CATALOG_NAME} has been re-created')
278
280
  if tdml.display.print_sqlmr_query:
279
281
  print(query)
280
282
  execute_query(query3)
281
283
  except Exception as e:
282
- print(str(e).split('\n')[0])
284
+ logger_safe('error', str(e).split('\n')[0])
283
285
 
284
286
  try:
285
287
  # Attempt to execute the create table query
286
288
  execute_query(query4)
287
289
  if tdml.display.print_sqlmr_query:
288
290
  print(query4)
289
- if tdfs4ds.DISPLAY_LOGS: print(f'TABLE {tdfs4ds.SCHEMA}.{tdfs4ds.DATA_DISTRIBUTION_NAME} has been created')
291
+ logger_safe('info', f'TABLE {tdfs4ds.SCHEMA}.{tdfs4ds.DATA_DISTRIBUTION_NAME} has been created')
290
292
  execute_query(query5)
291
293
  except Exception as e:
292
294
  # If the table already exists and if_exists is set to 'replace', drop the table and recreate it
293
- if tdfs4ds.DISPLAY_LOGS: print(str(e).split('\n')[0])
295
+ logger_safe('error', str(e).split('\n')[0])
294
296
  if str(e).split('\n')[0].endswith('already exists.') and (if_exists == 'replace'):
295
297
  execute_query(f'DROP TABLE {tdfs4ds.SCHEMA}.{tdfs4ds.DATA_DISTRIBUTION_NAME}')
296
- print(f'TABLE {tdfs4ds.SCHEMA}.{tdfs4ds.DATA_DISTRIBUTION_NAME} has been dropped')
298
+ logger_safe('info', f'TABLE {tdfs4ds.SCHEMA}.{tdfs4ds.DATA_DISTRIBUTION_NAME} has been dropped')
297
299
  try:
298
300
  # Attempt to recreate the table after dropping it
299
301
  execute_query(query4)
300
- if tdfs4ds.DISPLAY_LOGS: print(
301
- f'TABLE {tdfs4ds.SCHEMA}.{tdfs4ds.DATA_DISTRIBUTION_NAME} has been re-created')
302
+ logger_safe('info', f'TABLE {tdfs4ds.SCHEMA}.{tdfs4ds.DATA_DISTRIBUTION_NAME} has been re-created')
302
303
  if tdml.display.print_sqlmr_query:
303
304
  print(query4)
304
305
  execute_query(query5)
305
306
  except Exception as e:
306
- print(str(e).split('\n')[0])
307
+ logger_safe('error', str(e).split('\n')[0])
307
308
 
308
309
  try:
309
310
  # Attempt to execute the create table query
310
311
  execute_query(query6)
311
312
  if tdml.display.print_sqlmr_query:
312
313
  print(query6)
313
- if tdfs4ds.DISPLAY_LOGS: print(f'TABLE {tdfs4ds.SCHEMA}.{tdfs4ds.FILTER_MANAGER_NAME} has been created')
314
+ logger_safe('info', f'TABLE {tdfs4ds.SCHEMA}.{tdfs4ds.FILTER_MANAGER_NAME} has been created')
314
315
  execute_query(query7)
315
316
  except Exception as e:
316
317
  # If the table already exists and if_exists is set to 'replace', drop the table and recreate it
317
- if tdfs4ds.DISPLAY_LOGS: print(str(e).split('\n')[0])
318
+ logger_safe('error', str(e).split('\n')[0])
318
319
  if str(e).split('\n')[0].endswith('already exists.') and (if_exists == 'replace'):
319
320
  execute_query(f'DROP TABLE {tdfs4ds.SCHEMA}.{tdfs4ds.FILTER_MANAGER_NAME}')
320
- print(f'TABLE {tdfs4ds.SCHEMA}.{tdfs4ds.FILTER_MANAGER_NAME} has been dropped')
321
+ logger_safe('info', f'TABLE {tdfs4ds.SCHEMA}.{tdfs4ds.FILTER_MANAGER_NAME} has been dropped')
321
322
  try:
322
323
  # Attempt to recreate the table after dropping it
323
324
  execute_query(query6)
324
- if tdfs4ds.DISPLAY_LOGS: print(
325
- f'TABLE {tdfs4ds.SCHEMA}.{tdfs4ds.FILTER_MANAGER_NAME} has been re-created')
325
+ logger_safe('info', f'TABLE {tdfs4ds.SCHEMA}.{tdfs4ds.FILTER_MANAGER_NAME} has been re-created')
326
326
  if tdml.display.print_sqlmr_query:
327
327
  print(query6)
328
328
  execute_query(query7)
329
329
  except Exception as e:
330
- print(str(e).split('\n')[0])
330
+ logger_safe('error', str(e).split('\n')[0])
331
331
 
332
332
  try:
333
333
  # Attempt to create the secondary index
334
334
  execute_query(query2)
335
335
  if tdml.display.print_sqlmr_query:
336
336
  print(query)
337
- if tdfs4ds.DISPLAY_LOGS: print(f'SECONDARY INDEX ON TABLE {tdfs4ds.SCHEMA}.{tdfs4ds.PROCESS_CATALOG_NAME} has been created')
337
+ logger_safe('info', f'SECONDARY INDEX ON TABLE {tdfs4ds.SCHEMA}.{tdfs4ds.PROCESS_CATALOG_NAME} has been created')
338
+ except Exception as e:
339
+ logger_safe('error', str(e).split('\n')[0])
340
+
341
+ return tdfs4ds.PROCESS_CATALOG_NAME, tdfs4ds.DATA_DISTRIBUTION_NAME, tdfs4ds.FILTER_MANAGER_NAME
342
+
343
def get_process_info(process_id: str) -> dict:
    """
    Retrieve process information, including SQL and columns, from the process catalog.

    Args:
        process_id (str): The unique identifier of the process.

    Returns:
        dict: The catalog row for the process enriched with ``ENTITY_COLUMNS``,
        ``FEATURE_COLUMNS``, ``PROCESS_SQL`` and, when available, generated
        documentation and EXPLAIN analysis fields. Returns ``None`` when the
        process cannot be found or its catalog row cannot be read.
    """
    # Look up the catalog row for this process id.
    process_catalog = tdfs4ds.process_catalog()
    try:
        process_info = process_catalog[process_catalog['PROCESS_ID'] == process_id].to_pandas().to_dict(orient='records')[0]
        # ENTITY_ID / FEATURE_NAMES are stored as comma-separated strings.
        process_info['ENTITY_COLUMNS'] = process_info['ENTITY_ID'].split(',')
        process_info['FEATURE_COLUMNS'] = process_info['FEATURE_NAMES'].split(',')
    except Exception as e:
        # Covers both "no matching row" (IndexError on [0]) and unexpected catalog issues.
        logger_safe('error', f"Error retrieving process info: {e}")
        return None

    # Recover the SQL behind the process view; VIEW_NAME is '<schema>.<view>'.
    if process_info:
        view_parts = process_info['VIEW_NAME'].split('.')
        process_sql = tdfs4ds.utils.lineage.get_ddl(
            view_name   = view_parts[1],
            schema_name = view_parts[0],
            object_type = 'view'
        )
        process_info['PROCESS_SQL'] = process_sql

    # Imported locally — presumably to avoid a circular import at module load; TODO confirm.
    from tdfs4ds.genai.documentation import retrieve_documentation, retrieve_explain_documentation

    # Feature documentation is best-effort: log and continue on failure.
    try:
        documentation = retrieve_documentation(process_id)
        process_info['DOCUMENTED_SQL'] = documentation['DOCUMENTED_SQL']
        process_info['DOCUMENTED_ENTITY_COLUMNS'] = documentation['DOCUMENTED_ENTITY_COLUMNS']
        process_info['DOCUMENTED_FEATURE_COLUMNS'] = documentation['DOCUMENTED_FEATURE_COLUMNS']
        process_info['ENTITY_DESCRIPTION'] = documentation['ENTITY_DESCRIPTION']
        logger_safe('info', f"Successfully retrieved documentation for process_id {process_id}")
    except Exception as e:
        logger_safe('error', f"Error retrieving documentation: {e}")

    # EXPLAIN-based analysis is also best-effort.
    try:
        explain_documentation = retrieve_explain_documentation(process_id)
        process_info['EXPLAIN_ANALYSIS'] = explain_documentation['EXPLAIN_ANALYSIS']
        process_info['OPTIMIZATION_SCORE'] = explain_documentation['OPTIMIZATION_SCORE']
        process_info['EXPLAIN_WARNINGS'] = explain_documentation['EXPLAIN_WARNINGS']
        process_info['EXPLAIN_RECOMMENDATIONS'] = explain_documentation['EXPLAIN_RECOMMENDATIONS']
        logger_safe('info', f"Successfully retrieved explanation for process_id {process_id}")
    except Exception as e:
        logger_safe('error', f"Error retrieving explanation: {e}")

    return process_info
@@ -7,20 +7,30 @@ import json
7
7
 
8
8
 
9
9
def get_hidden_table_name(schema_name, view_name):
    """
    Return the name of the backing ('hidden') table behind a public view.

    The name is extracted from the view's DDL; when the view does not exist
    (or the DDL cannot be parsed) the conventional '<view_name>_HIDDEN'
    suffix is returned instead.

    Args:
        schema_name (str): Schema that contains the view.
        view_name (str): Public-facing view name.

    Returns:
        str: The hidden table name backing the view.
    """
    try:
        ddl = tdfs4ds.utils.lineage.get_ddl(schema_name=schema_name, view_name=view_name)

        # NOTE(review): positional parsing is fragile — it assumes the backing
        # table reference sits on the second-to-last DDL line as
        # '<schema>.<table>'. Kept as-is; consider a sturdier parse.
        backing = ddl.split("\n")[-2].split(".")[1]
        return backing

    except Exception as e:
        msg = str(e)

        # Teradata error 3807 (object does not exist) is expected on
        # first-time setup, so log it at info level rather than as a warning.
        if "Error 3807" in msg or "does not exist" in msg.lower():
            logger_safe(
                "info",
                f"View {schema_name}.{view_name} not found; using default hidden table name {view_name}_HIDDEN."
            )
        else:
            # Guard against an empty message: splitlines() on '' yields [].
            first_line = (msg.splitlines() or [msg])[0]
            logger_safe(
                "warning",
                f"Failed to extract hidden table name from DDL; defaulting to suffix method. Error: {first_line}"
            )

        return view_name + "_HIDDEN"
25
35
 
26
36
 
@@ -227,11 +237,26 @@ class FilterManager:
227
237
  primary_index = [self.filter_id_name]
228
238
 
229
239
  logger.debug("Writing hidden table", extra={"primary_index": primary_index})
240
+ data_types = tdfs4ds.utils.info.get_feature_types_sql_format(df_filter)
241
+ query_creation = f"""
242
+ CREATE TABLE {self.schema_name}.{self.table_name} (
243
+ {', '.join([f'"{k}" {v}' for k,v in data_types.items()])}
244
+ ) UNIQUE PRIMARY INDEX ({primary_index[0]})
245
+ """
246
+
247
+ logger.debug("Creating hidden table with SQL:\n%s", query_creation)
248
+ try:
249
+ tdml.execute_sql(f"DROP TABLE {self.schema_name}.{self.table_name}")
250
+ except Exception:
251
+ logger.debug("Hidden table did not exist; proceeding to create.")
252
+ pass # Ignore if table does not exist
253
+ tdml.execute_sql(query_creation)
254
+ logger.debug("Inserting filter data into hidden table")
255
+
230
256
  df_filter.to_sql(
231
257
  table_name=self.table_name,
232
258
  schema_name=self.schema_name,
233
- if_exists="replace",
234
- primary_index=primary_index,
259
+ if_exists="append"
235
260
  )
236
261
 
237
262
  # Create/replace public view with filter_id = 1
@@ -245,6 +270,8 @@ class FilterManager:
245
270
  tdml.execute_sql(view_sql)
246
271
 
247
272
  # Collect stats to help the optimizer
273
+ for col in df_filter.columns:
274
+ tdml.execute_sql(f"COLLECT STATISTICS ON {self.schema_name}.{self.table_name} COLUMN ({col})")
248
275
  stats_sql = f"""
249
276
  COLLECT STATISTICS USING NO SAMPLE AND NO THRESHOLD
250
277
  COLUMN ({self.filter_id_name})
@@ -8,16 +8,33 @@ import numpy as np
8
8
  import pandas as pd
9
9
 
10
10
 
11
- def get_hidden_table_name(table_name: str) -> str:
12
- """Return the hidden table name associated with a public view name.
11
def get_hidden_table_name(schema_name, view_name):
    """
    Return the name of the backing ('hidden') table behind a public view.

    The name is extracted from the view's DDL; when the view does not exist
    (or the DDL cannot be parsed) the conventional '<view_name>_HIDDEN'
    suffix is returned instead.

    Args:
        schema_name (str): Schema that contains the view.
        view_name (str): Public-facing view name.

    Returns:
        str: The hidden table name backing the view.
    """
    try:
        ddl = tdfs4ds.utils.lineage.get_ddl(schema_name=schema_name, view_name=view_name)

        # NOTE(review): positional parsing is fragile — it assumes the backing
        # table reference sits on the second-to-last DDL line as
        # '<schema>.<table>'. Kept as-is; consider a sturdier parse.
        backing = ddl.split("\n")[-2].split(".")[1]
        return backing

    except Exception as e:
        msg = str(e)

        # Teradata error 3807 (object does not exist) is expected on
        # first-time setup, so log it at info level rather than as a warning.
        if "Error 3807" in msg or "does not exist" in msg.lower():
            logger_safe(
                "info",
                f"View {schema_name}.{view_name} not found; using default hidden table name {view_name}_HIDDEN."
            )
        else:
            # Guard against an empty message: splitlines() on '' yields [].
            first_line = (msg.splitlines() or [msg])[0]
            logger_safe(
                "warning",
                f"Failed to extract hidden table name from DDL; defaulting to suffix method. Error: {first_line}"
            )

        return view_name + "_HIDDEN"
16
37
 
17
- Returns:
18
- The hidden table name (e.g., "<name>_HIDDEN").
19
- """
20
- return f"{table_name}_HIDDEN"
21
38
 
22
39
 
23
40
  class TimeManager:
@@ -91,7 +108,7 @@ class TimeManager:
91
108
  schema_name: Schema that contains/should contain the objects.
92
109
  """
93
110
  self.schema_name = schema_name
94
- self.table_name = get_hidden_table_name(table_name)
111
+ self.table_name = get_hidden_table_name(schema_name=schema_name,view_name=table_name)
95
112
  self.view_name = table_name
96
113
  self.time_id = 'time_id'
97
114
  self.nb_time_steps = None
@@ -461,7 +478,7 @@ class TimeManager:
461
478
 
462
479
  if clone_mode == "hard":
463
480
  # Hard clone → create (or reuse) a NEW hidden table in this schema
464
- self.table_name = get_hidden_table_name(self.view_name)
481
+ self.table_name = get_hidden_table_name(schema_name=self.schema_name, view_name=self.view_name)
465
482
  existing_dest = [t.lower() for t in tdml.db_list_tables(schema_name=self.schema_name).TableName.values]
466
483
 
467
484
  if self.table_name.lower() in existing_dest:
@@ -555,7 +572,7 @@ class TimeManager:
555
572
  if current_time_id is None:
556
573
  current_time_id = 1
557
574
 
558
- dest_table = get_hidden_table_name(self.view_name)
575
+ dest_table = get_hidden_table_name(schema_name=self.schema_name, view_name=self.view_name)
559
576
 
560
577
  if not create_copy:
561
578
  # Only mark as owned if we already match <schema, view_HIDDEN>
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: tdfs4ds
3
- Version: 0.2.4.41
3
+ Version: 0.2.5.1
4
4
  Summary: A python package to simplify the usage of feature store using Teradata Vantage ...
5
5
  Author: Denis Molin
6
6
  Requires-Python: >=3.6
@@ -1,13 +1,5 @@
1
- tdfs/__init__.py,sha256=7AcO7uB1opRCt7t2JOHworKimfAaDeO3boRW7u9Geo8,23
2
- tdfs/datasets.py,sha256=-b2MPEKGki2V1M8iUcoDR9uc2krIK7u1CK-EhChvihs,985
3
- tdfs/feature_store.py,sha256=Honu7eOAXxP4Ivz0mRlhuNkfTDzgZl5HB1WlQUwzcZ0,31354
4
- tdfs/data/curves.csv,sha256=q0Tm-0yu7VMK4lHvHpgi1LMeRq0lO5gJy2Q17brKbEM,112488
5
- tdfs4ds/__init__.py,sha256=vmrTjmXxX7NsqMIUYGLt3x72Gg1nazDUU9pCE_-1dpU,61203
1
+ tdfs4ds/__init__.py,sha256=K8Op6s1VOllung41Px6bjyzivUJPnJx-WthTPh2Rtsg,70555
6
2
  tdfs4ds/datasets.py,sha256=LE4Gn0muwdyrIrCrbkE92cnafUML63z1lj5bFIIVzmc,3524
7
- tdfs4ds/feature_engineering.py,sha256=oVnZ2V_XNGE12LKC_fNfkrWSQZLgtYRmaf8Dispi6S4,7081
8
- tdfs4ds/feature_store.py,sha256=y-oItPZw6nBkBcGAceaATZbkLPTsvpk0OnpzTxYofDs,68576
9
- tdfs4ds/process_store.py,sha256=W97pwqOwabo062ow_LfAXZmlSkcq8xTuwhwAX1EStlQ,16939
10
- tdfs4ds/utils.py,sha256=xF1VP0NCgosXcKymOo_ofMMnvLEF228IxaxIl-f65uA,23312
11
3
  tdfs4ds/data/curves.csv,sha256=q0Tm-0yu7VMK4lHvHpgi1LMeRq0lO5gJy2Q17brKbEM,112488
12
4
  tdfs4ds/data/logo/tdfs4ds_logo.png,sha256=OCKQnH0gQbRyupwZeiIgo-9c6mdRtjE2E2Zunr_4Ae0,363980
13
5
  tdfs4ds/data/logo/teradata_sym_rgb_pos.png,sha256=Zq-QzLb04PIQ4iN8C6ssaLuNVVI1Q_TqBkFx_f7aNOI,8052
@@ -17,22 +9,24 @@ tdfs4ds/dataset/dataset.py,sha256=J_fgfsVdR9zSOXrUOqyotqsUD-GlQMGyuld6ueov45w,76
17
9
  tdfs4ds/dataset/dataset_catalog.py,sha256=qxS2thDW2MvsRouSFaX1M0sX2J7IzBAYD8Yf22Tsd5k,16638
18
10
  tdfs4ds/feature_store/__init__.py,sha256=a7NPCkpTx40UR5LRErwnskpABG2Vuib7F5wUjaUGCnI,209
19
11
  tdfs4ds/feature_store/entity_management.py,sha256=9ltytv3yCTG84NZXBpb1Tlkf9pOxvrNb0MVidU4pwvE,10157
20
- tdfs4ds/feature_store/feature_data_processing.py,sha256=gjwypiTfwTyGyrP20v35Vu2uGIrCY80OBBeMVBsdjuk,45020
12
+ tdfs4ds/feature_store/feature_data_processing.py,sha256=gXBsr1H05zxM4tWE7y29ucxeoTu1jQITOwTXqi1Y2pk,45214
21
13
  tdfs4ds/feature_store/feature_query_retrieval.py,sha256=51c6ZNlLFiBIxNPinS8ot8bjWEIb1QV2eVg69yzVF80,35381
22
14
  tdfs4ds/feature_store/feature_store_management.py,sha256=mtPQkdMDhcOrhj9IAaH-FEP_znK53cYtEv8zXAbsigg,52123
15
+ tdfs4ds/genai/__init__.py,sha256=Hal13Kw75nDYKHtfvHZNdm98exqmY6qaqGZkJA2TQ6E,723
16
+ tdfs4ds/genai/documentation.py,sha256=m3PAIsvGF2_vd1cGm3sG0EE0pLwLG9RqcsxlhkuSXQ0,82056
23
17
  tdfs4ds/process_store/__init__.py,sha256=npHR_xju5ecGmWfYHDyteLwiU3x-cL4HD3sFK_th7xY,229
24
18
  tdfs4ds/process_store/process_followup.py,sha256=E4jgQahjhVRBbfAW3JXNLId7H5qV8ozRt-6PyAQuPzg,12583
25
19
  tdfs4ds/process_store/process_query_administration.py,sha256=AOufkJ6DFUpBiGm-6Q6Dq0Aovw31UGTscZ3Ya0ewS-0,7851
26
20
  tdfs4ds/process_store/process_registration_management.py,sha256=2fFjt4Pmh3An1BUFvRX3xABSlQrlWiEiPQStH3A9Xpk,36130
27
- tdfs4ds/process_store/process_store_catalog_management.py,sha256=eVUU9uanyXCUkzi2vcHbJPL9qFiXVasnCxPGr-r9EY8,16090
21
+ tdfs4ds/process_store/process_store_catalog_management.py,sha256=s-2JAaOx-m_hANVWGLCRL-FnoD_dzT_BqQdN74Vxzcw,18770
28
22
  tdfs4ds/utils/__init__.py,sha256=-yTMfDLZbQnIRQ64s_bczzT21tDW2A8FZeq9PX5SgFU,168
29
- tdfs4ds/utils/filter_management.py,sha256=VgBNVopymznxDvv8_oxGVFw83e2C06Z7nLZlYuor27I,27412
23
+ tdfs4ds/utils/filter_management.py,sha256=rF8v0fws-ZR9og7x2TBqS5VWpOit1cwgF2xNkb_Hw1w,28635
30
24
  tdfs4ds/utils/info.py,sha256=sShnUxXMlvCtQ6xtShDhqdpTr6sMG0dZQhNBFgUENDY,12058
31
25
  tdfs4ds/utils/lineage.py,sha256=gy5M42qy5fvdWmlohAY3WPYoqAyp5VakeEmeT1YjrJQ,37839
32
26
  tdfs4ds/utils/query_management.py,sha256=kWDeTdsYcbpV5Tyhh-8uLRWvXh16nIdXNIJ97w76aNU,4848
33
- tdfs4ds/utils/time_management.py,sha256=asIWvK5K81NNwAGqC-9Tv4Timscxyv0vyuPFs01whu0,31461
27
+ tdfs4ds/utils/time_management.py,sha256=g3EJO7I8ERoZ4X7yq5SyDqSE4O9p0BRcv__QPuAxbGA,32243
34
28
  tdfs4ds/utils/visualization.py,sha256=5S528KoKzzkrAdCxfy7ecyqKvAXBoibNvHwz_u5ISMs,23167
35
- tdfs4ds-0.2.4.41.dist-info/METADATA,sha256=dKFroIkGVKCcM4JPHN1HyIEGU_dGwTEi8EMCsDCGgMc,14326
36
- tdfs4ds-0.2.4.41.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91
37
- tdfs4ds-0.2.4.41.dist-info/top_level.txt,sha256=wMyVkMvnBn8RRt1xBveGQxOpWFijPMPkMiE7G2mi8zo,8
38
- tdfs4ds-0.2.4.41.dist-info/RECORD,,
29
+ tdfs4ds-0.2.5.1.dist-info/METADATA,sha256=cRP6PxvcR88XuPDgIh_AagK-4eGmLaGjeF4zfE6AJSY,14325
30
+ tdfs4ds-0.2.5.1.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91
31
+ tdfs4ds-0.2.5.1.dist-info/top_level.txt,sha256=wMyVkMvnBn8RRt1xBveGQxOpWFijPMPkMiE7G2mi8zo,8
32
+ tdfs4ds-0.2.5.1.dist-info/RECORD,,
tdfs/__init__.py DELETED
@@ -1 +0,0 @@
1
- __version__ = '0.1.0.0'