datatailr 0.1.12__tar.gz → 0.1.14__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of datatailr might be problematic.
Files changed (38)
  1. {datatailr-0.1.12/src/datatailr.egg-info → datatailr-0.1.14}/PKG-INFO +9 -9
  2. {datatailr-0.1.12 → datatailr-0.1.14}/README.md +8 -8
  3. {datatailr-0.1.12 → datatailr-0.1.14}/pyproject.toml +1 -1
  4. {datatailr-0.1.12 → datatailr-0.1.14/src/datatailr.egg-info}/PKG-INFO +9 -9
  5. {datatailr-0.1.12 → datatailr-0.1.14}/LICENSE +0 -0
  6. {datatailr-0.1.12 → datatailr-0.1.14}/setup.cfg +0 -0
  7. {datatailr-0.1.12 → datatailr-0.1.14}/setup.py +0 -0
  8. {datatailr-0.1.12 → datatailr-0.1.14}/src/datatailr/__init__.py +0 -0
  9. {datatailr-0.1.12 → datatailr-0.1.14}/src/datatailr/acl.py +0 -0
  10. {datatailr-0.1.12 → datatailr-0.1.14}/src/datatailr/blob.py +0 -0
  11. {datatailr-0.1.12 → datatailr-0.1.14}/src/datatailr/build/__init__.py +0 -0
  12. {datatailr-0.1.12 → datatailr-0.1.14}/src/datatailr/build/image.py +0 -0
  13. {datatailr-0.1.12 → datatailr-0.1.14}/src/datatailr/dt_json.py +0 -0
  14. {datatailr-0.1.12 → datatailr-0.1.14}/src/datatailr/errors.py +0 -0
  15. {datatailr-0.1.12 → datatailr-0.1.14}/src/datatailr/group.py +0 -0
  16. {datatailr-0.1.12 → datatailr-0.1.14}/src/datatailr/logging.py +0 -0
  17. {datatailr-0.1.12 → datatailr-0.1.14}/src/datatailr/scheduler/__init__.py +0 -0
  18. {datatailr-0.1.12 → datatailr-0.1.14}/src/datatailr/scheduler/arguments_cache.py +0 -0
  19. {datatailr-0.1.12 → datatailr-0.1.14}/src/datatailr/scheduler/base.py +0 -0
  20. {datatailr-0.1.12 → datatailr-0.1.14}/src/datatailr/scheduler/batch.py +0 -0
  21. {datatailr-0.1.12 → datatailr-0.1.14}/src/datatailr/scheduler/batch_decorator.py +0 -0
  22. {datatailr-0.1.12 → datatailr-0.1.14}/src/datatailr/scheduler/constants.py +0 -0
  23. {datatailr-0.1.12 → datatailr-0.1.14}/src/datatailr/scheduler/schedule.py +0 -0
  24. {datatailr-0.1.12 → datatailr-0.1.14}/src/datatailr/scheduler/utils.py +0 -0
  25. {datatailr-0.1.12 → datatailr-0.1.14}/src/datatailr/user.py +0 -0
  26. {datatailr-0.1.12 → datatailr-0.1.14}/src/datatailr/utils.py +0 -0
  27. {datatailr-0.1.12 → datatailr-0.1.14}/src/datatailr/version.py +0 -0
  28. {datatailr-0.1.12 → datatailr-0.1.14}/src/datatailr/wrapper.py +0 -0
  29. {datatailr-0.1.12 → datatailr-0.1.14}/src/datatailr.egg-info/SOURCES.txt +0 -0
  30. {datatailr-0.1.12 → datatailr-0.1.14}/src/datatailr.egg-info/dependency_links.txt +0 -0
  31. {datatailr-0.1.12 → datatailr-0.1.14}/src/datatailr.egg-info/entry_points.txt +0 -0
  32. {datatailr-0.1.12 → datatailr-0.1.14}/src/datatailr.egg-info/requires.txt +0 -0
  33. {datatailr-0.1.12 → datatailr-0.1.14}/src/datatailr.egg-info/top_level.txt +0 -0
  34. {datatailr-0.1.12 → datatailr-0.1.14}/src/sbin/datatailr_run.py +0 -0
  35. {datatailr-0.1.12 → datatailr-0.1.14}/src/sbin/datatailr_run_app.py +0 -0
  36. {datatailr-0.1.12 → datatailr-0.1.14}/src/sbin/datatailr_run_batch.py +0 -0
  37. {datatailr-0.1.12 → datatailr-0.1.14}/src/sbin/datatailr_run_excel.py +0 -0
  38. {datatailr-0.1.12 → datatailr-0.1.14}/src/sbin/datatailr_run_service.py +0 -0
{datatailr-0.1.12/src/datatailr.egg-info → datatailr-0.1.14}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: datatailr
-Version: 0.1.12
+Version: 0.1.14
 Summary: Ready-to-Use Platform That Drives Business Insights
 Author-email: Datatailr <info@datatailr.com>
 License-Expression: MIT
@@ -86,12 +86,12 @@ The following example shows how to create a simple data pipeline using the Datat
 ```python
 from datatailr.scheduler import batch, Batch
 
-@batch()
+@batch_job()
 def func_no_args() -> str:
     return "no_args"
 
 
-@batch()
+@batch_job()
 def func_with_args(a: int, b: float) -> str:
     return f"args: {a}, {b}"
 
@@ -102,7 +102,7 @@ with Batch(name="MY test DAG", local_run=True) as dag:
 ```
 
 Running this code will create a graph of jobs and execute it.
-Each node on the graph represents a job, which in turn is a call to a function decorated with `@batch()`.
+Each node on the graph represents a job, which in turn is a call to a function decorated with `@batch_job()`.
 
 Since this is a local run then the execution of each node will happen sequentially in the same process.
 
@@ -117,14 +117,14 @@ You will first need to separate your function definitions from the DAG definitio
 ```python
 # my_module.py
 
-from datatailr.scheduler import batch, Batch
+from datatailr.scheduler import batch_job, Batch
 
-@batch()
+@batch_job()
 def func_no_args() -> str:
     return "no_args"
 
 
-@batch()
+@batch_job()
 def func_with_args(a: int, b: float) -> str:
     return f"args: {a}, {b}"
 ```
@@ -133,9 +133,9 @@ To use these functions in a batch job, you just need to import them and run in a
 
 ```python
 from my_module import func_no_args, func_with_args
-from datatailr.scheduler import Schedule
+from datatailr.scheduler import Batch, Schedule
 
-schedule = Schedule(at_hour=0)
+schedule = Schedule(at_hours=0)
 
 with Batch(name="MY test DAG", schedule=schedule) as dag:
     for n in range(2):
{datatailr-0.1.12 → datatailr-0.1.14}/README.md

@@ -49,12 +49,12 @@ The following example shows how to create a simple data pipeline using the Datat
 ```python
 from datatailr.scheduler import batch, Batch
 
-@batch()
+@batch_job()
 def func_no_args() -> str:
     return "no_args"
 
 
-@batch()
+@batch_job()
 def func_with_args(a: int, b: float) -> str:
     return f"args: {a}, {b}"
 
@@ -65,7 +65,7 @@ with Batch(name="MY test DAG", local_run=True) as dag:
 ```
 
 Running this code will create a graph of jobs and execute it.
-Each node on the graph represents a job, which in turn is a call to a function decorated with `@batch()`.
+Each node on the graph represents a job, which in turn is a call to a function decorated with `@batch_job()`.
 
 Since this is a local run then the execution of each node will happen sequentially in the same process.
 
@@ -80,14 +80,14 @@ You will first need to separate your function definitions from the DAG definitio
 ```python
 # my_module.py
 
-from datatailr.scheduler import batch, Batch
+from datatailr.scheduler import batch_job, Batch
 
-@batch()
+@batch_job()
 def func_no_args() -> str:
     return "no_args"
 
 
-@batch()
+@batch_job()
 def func_with_args(a: int, b: float) -> str:
     return f"args: {a}, {b}"
 ```
@@ -96,9 +96,9 @@ To use these functions in a batch job, you just need to import them and run in a
 
 ```python
 from my_module import func_no_args, func_with_args
-from datatailr.scheduler import Schedule
+from datatailr.scheduler import Batch, Schedule
 
-schedule = Schedule(at_hour=0)
+schedule = Schedule(at_hours=0)
 
 with Batch(name="MY test DAG", schedule=schedule) as dag:
     for n in range(2):
{datatailr-0.1.12 → datatailr-0.1.14}/pyproject.toml

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "datatailr"
-version = "0.1.12"
+version = "0.1.14"
 description = "Ready-to-Use Platform That Drives Business Insights"
 readme = "README.md"
 requires-python = ">=3.9"
{datatailr-0.1.12 → datatailr-0.1.14/src/datatailr.egg-info}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: datatailr
-Version: 0.1.12
+Version: 0.1.14
 Summary: Ready-to-Use Platform That Drives Business Insights
 Author-email: Datatailr <info@datatailr.com>
 License-Expression: MIT
@@ -86,12 +86,12 @@ The following example shows how to create a simple data pipeline using the Datat
 ```python
 from datatailr.scheduler import batch, Batch
 
-@batch()
+@batch_job()
 def func_no_args() -> str:
     return "no_args"
 
 
-@batch()
+@batch_job()
 def func_with_args(a: int, b: float) -> str:
     return f"args: {a}, {b}"
 
@@ -102,7 +102,7 @@ with Batch(name="MY test DAG", local_run=True) as dag:
 ```
 
 Running this code will create a graph of jobs and execute it.
-Each node on the graph represents a job, which in turn is a call to a function decorated with `@batch()`.
+Each node on the graph represents a job, which in turn is a call to a function decorated with `@batch_job()`.
 
 Since this is a local run then the execution of each node will happen sequentially in the same process.
 
@@ -117,14 +117,14 @@ You will first need to separate your function definitions from the DAG definitio
 ```python
 # my_module.py
 
-from datatailr.scheduler import batch, Batch
+from datatailr.scheduler import batch_job, Batch
 
-@batch()
+@batch_job()
 def func_no_args() -> str:
     return "no_args"
 
 
-@batch()
+@batch_job()
 def func_with_args(a: int, b: float) -> str:
     return f"args: {a}, {b}"
 ```
@@ -133,9 +133,9 @@ To use these functions in a batch job, you just need to import them and run in a
 
 ```python
 from my_module import func_no_args, func_with_args
-from datatailr.scheduler import Schedule
+from datatailr.scheduler import Batch, Schedule
 
-schedule = Schedule(at_hour=0)
+schedule = Schedule(at_hours=0)
 
 with Batch(name="MY test DAG", schedule=schedule) as dag:
     for n in range(2):
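The second PKG-INFO mirrors the README verbatim, so its final hunk documents the scheduled (non-local) variant. A sketch of that variant under 0.1.14 follows, with two loud assumptions: that `at_hours=0` requests a daily run at hour 0, and that the loop body elided by the diff calls the decorated functions to create per-iteration nodes:

```python
# Hypothetical reconstruction of the scheduled example from the @@ -133 hunk;
# my_module.py is the module defined in the @@ -117 hunk above.
from my_module import func_no_args, func_with_args
from datatailr.scheduler import Batch, Schedule

schedule = Schedule(at_hours=0)  # assumed: run daily at hour 0

with Batch(name="MY test DAG", schedule=schedule) as dag:
    for n in range(2):
        func_with_args(n, float(n))  # hypothetical loop body; elided in diff
    func_no_args()
```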