datapipelab 0.2.6__py3-none-any.whl → 0.2.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
datapipelab/app/node/custom_node.py
@@ -2,7 +2,7 @@ from datapipelab.app.node.tnode import TNode
 from datapipelab.logger import logger
 
 class CustomNode(TNode):
-    def __init__(self, spark, tnode_config, t_df):
+    def __init__(self, spark, tnode_config, t_df=None):
         super().__init__(spark=spark)
         self.tnode_config = tnode_config
         self.spark = spark
@@ -10,7 +10,7 @@ class CustomNode(TNode):
         module_name = tnode_config['options']['module_name']
         module_path = tnode_config['options']['module_path']
         class_name = tnode_config['options']['class_name']
-        self.custom_processor = self.import_module(module_name, module_path, class_name)
+        self.custom_node = self.import_module(module_name, module_path, class_name)
 
     def import_module(self, module_name, module_path, class_name):
         custom_module = __import__(module_path, fromlist=[module_name])
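For orientation only (this is not code from the package): when `__import__` is given a non-empty fromlist, it returns the leaf module named by the dotted path, so the requested class can then be looked up on that module by name. A minimal equivalent sketch using importlib, with hypothetical names:

    import importlib

    def load_class(module_path, class_name):
        # importlib.import_module("pkg.sub") returns the "pkg.sub" module itself,
        # matching the __import__(module_path, fromlist=[...]) call above.
        module = importlib.import_module(module_path)
        return getattr(module, class_name)

    # Hypothetical usage:
    # NodeClass = load_class("my_pkg.my_nodes", "MyCustomNode")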
@@ -19,4 +19,4 @@ class CustomNode(TNode):
 
     def _process(self):
         logger.info(f"Custom node type is {self.tnode_config.get('custom_node_type', 'N/A!')}")
-        return self.custom_processor.process()
+        return self.custom_node.custom_process()
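With this rename, the dynamically loaded object must expose a `custom_process()` method rather than `process()`. A minimal user-side sketch compatible with that contract; the module name, class name, and constructor signature are hypothetical, since the instantiation step inside `import_module` is not visible in this diff:

    # my_pkg/my_nodes.py (hypothetical module referenced from the pipeline config)
    class MyCustomNode:
        def __init__(self, spark=None, tnode_config=None, t_df=None):
            # Constructor signature is an assumption, not taken from the package.
            self.spark = spark
            self.tnode_config = tnode_config
            self.t_df = t_df

        def custom_process(self):
            # Return a DataFrame when used as a source node; the handler's
            # 'custom' sink/processor branch (further below) discards the result.
            return None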
datapipelab/pipeline_handler.py
@@ -1,4 +1,4 @@
-from datapipelab.app.node.processor.custom_node import CustomNode
+from datapipelab.app.node.custom_node import CustomNode
 from datapipelab.app.node.processor.shell_node import ShellProcessorNode
 from datapipelab.app.node.source.hive_node import HiveSourceNode
 from datapipelab.app.node.source.spark_node import SparkSourceNode
@@ -40,6 +40,8 @@ class PipelineHandler:
         if input_type == 'adls_path':
             if input_format == 'delta':
                 source_df = DeltaSourceNode(self.spark, tnode_config).run()
+        if input_type == 'custom':
+            source_df = CustomNode(self.spark, tnode_config).run()
 
         return source_df
 
@@ -82,8 +84,7 @@ class PipelineHandler:
             if tnode_format == "csv":
                 PandasCSVSinkNode(self.spark, tnode_config, t_df).run()
         if tnode_type == 'custom':
-            from datapipelab.app.node import custom_node
-            processor_df = custom_node.CustomNode(self.spark, tnode_config).run()
+            CustomNode(self.spark, tnode_config).run()
         if tnode_type == 'spark':
             if tnode_format == 'hive':
                 from datapipelab.app.node.sink import hive_node
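Both new 'custom' branches hand the whole `tnode_config` to `CustomNode`, which reads only three keys from its `options` block. A sketch of such an entry; every key name and value outside `options.module_name`, `options.module_path`, and `options.class_name` is an assumption about the config schema, not taken from the package:

    tnode_config = {
        "type": "custom",                       # assumed key for input_type / tnode_type
        "options": {
            "module_name": "my_nodes",          # assumed example value
            "module_path": "my_pkg.my_nodes",   # dotted path passed to __import__
            "class_name": "MyCustomNode",       # class resolved by import_module
        },
    }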
datapipelab-0.2.6.dist-info/METADATA → datapipelab-0.2.8.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: datapipelab
-Version: 0.2.6
+Version: 0.2.8
 Summary: A data pipeline library with connectors, sources, processors, and sinks.
 Requires-Dist: json5
 Requires-Dist: loguru
datapipelab-0.2.6.dist-info/RECORD → datapipelab-0.2.8.dist-info/RECORD
@@ -3,11 +3,11 @@ datapipelab/engine.py,sha256=3QRsedRYNov6xIDOZ1tukinFE-SKv39Fn3sNCnD3L6g,442
 datapipelab/logger.py,sha256=Ugv0A4TfD3JWCWXNWu0lURcnfAEyuVrK3IrvVVgcHBo,864
 datapipelab/pipeline.py,sha256=dw9D9KM_hztt9g_YzqoNgQBRyCYR92cRZwrU5duP_Pg,1464
 datapipelab/pipeline_config.py,sha256=2bFAJepViE7rT7CaRANZU07aeQpOYcZ954ISujm9pXA,3816
-datapipelab/pipeline_handler.py,sha256=1kH5jwT_YJ7LY8VfyLiRv2miLbH0TWd_6H3pKjqTABk,4763
+datapipelab/pipeline_handler.py,sha256=m-7coayAvUeOhL-5zfMeeyENK7dRt33uK6h-5-8P7U8,4771
 datapipelab/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 datapipelab/app/connector_node/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 datapipelab/app/node/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-datapipelab/app/node/custom_node.py,sha256=VvjwkECTobRhO_fYKUrJCd117B5MoR9P6UKYZfRLhV4,1017
+datapipelab/app/node/custom_node.py,sha256=3Se4DweMvm5VK4MTZ-pQSQ_lE_fOm6cGj-wzcqKEU0E,1019
 datapipelab/app/node/tnode.py,sha256=-2hnQkIuLwEy7xVTig54TByO7L2l7UujolXMQL0CQJA,484
 datapipelab/app/node/processor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 datapipelab/app/node/processor/bigquery_api_node.py,sha256=IclDkGxo9ltGJVkBaHKFPFCSlEEyzefgalaAOLA17bE,1752
@@ -26,7 +26,7 @@ datapipelab/app/node/source/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMp
 datapipelab/app/node/source/delta_node.py,sha256=gg7SfuKBAAfjk6OX2jNrot9XX61HoBe3us3D8O-dscE,529
 datapipelab/app/node/source/hive_node.py,sha256=h_AMCnnmon7uLRIGsaHAPWEReD3VaWZXnz9r0TpLGNM,478
 datapipelab/app/node/source/spark_node.py,sha256=S_x2atRFPDnXmhCUtcmaLc4BDFd2H4uQq6wnEJb7Uug,480
-datapipelab-0.2.6.dist-info/METADATA,sha256=X_M1o2nDMD557iyAEh3qCcYNPt7QK_jxF441_gDbhlw,220
-datapipelab-0.2.6.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-datapipelab-0.2.6.dist-info/top_level.txt,sha256=HgeBjHvXorKzvNqU5BNPutoI771HtiqVit9_-0Zyrb4,12
-datapipelab-0.2.6.dist-info/RECORD,,
+datapipelab-0.2.8.dist-info/METADATA,sha256=hr8HekJq24z6Uz9xPMfeNObw6RP1Kx3uyO-3jzDH-do,220
+datapipelab-0.2.8.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+datapipelab-0.2.8.dist-info/top_level.txt,sha256=HgeBjHvXorKzvNqU5BNPutoI771HtiqVit9_-0Zyrb4,12
+datapipelab-0.2.8.dist-info/RECORD,,