datatailr 0.1.11__tar.gz → 0.1.12__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of datatailr might be problematic.

Files changed (38)
  1. {datatailr-0.1.11/src/datatailr.egg-info → datatailr-0.1.12}/PKG-INFO +2 -2
  2. {datatailr-0.1.11 → datatailr-0.1.12}/README.md +1 -1
  3. {datatailr-0.1.11 → datatailr-0.1.12}/pyproject.toml +1 -1
  4. {datatailr-0.1.11 → datatailr-0.1.12/src/datatailr.egg-info}/PKG-INFO +2 -2
  5. {datatailr-0.1.11 → datatailr-0.1.12}/LICENSE +0 -0
  6. {datatailr-0.1.11 → datatailr-0.1.12}/setup.cfg +0 -0
  7. {datatailr-0.1.11 → datatailr-0.1.12}/setup.py +0 -0
  8. {datatailr-0.1.11 → datatailr-0.1.12}/src/datatailr/__init__.py +0 -0
  9. {datatailr-0.1.11 → datatailr-0.1.12}/src/datatailr/acl.py +0 -0
  10. {datatailr-0.1.11 → datatailr-0.1.12}/src/datatailr/blob.py +0 -0
  11. {datatailr-0.1.11 → datatailr-0.1.12}/src/datatailr/build/__init__.py +0 -0
  12. {datatailr-0.1.11 → datatailr-0.1.12}/src/datatailr/build/image.py +0 -0
  13. {datatailr-0.1.11 → datatailr-0.1.12}/src/datatailr/dt_json.py +0 -0
  14. {datatailr-0.1.11 → datatailr-0.1.12}/src/datatailr/errors.py +0 -0
  15. {datatailr-0.1.11 → datatailr-0.1.12}/src/datatailr/group.py +0 -0
  16. {datatailr-0.1.11 → datatailr-0.1.12}/src/datatailr/logging.py +0 -0
  17. {datatailr-0.1.11 → datatailr-0.1.12}/src/datatailr/scheduler/__init__.py +0 -0
  18. {datatailr-0.1.11 → datatailr-0.1.12}/src/datatailr/scheduler/arguments_cache.py +0 -0
  19. {datatailr-0.1.11 → datatailr-0.1.12}/src/datatailr/scheduler/base.py +0 -0
  20. {datatailr-0.1.11 → datatailr-0.1.12}/src/datatailr/scheduler/batch.py +0 -0
  21. {datatailr-0.1.11 → datatailr-0.1.12}/src/datatailr/scheduler/batch_decorator.py +0 -0
  22. {datatailr-0.1.11 → datatailr-0.1.12}/src/datatailr/scheduler/constants.py +0 -0
  23. {datatailr-0.1.11 → datatailr-0.1.12}/src/datatailr/scheduler/schedule.py +0 -0
  24. {datatailr-0.1.11 → datatailr-0.1.12}/src/datatailr/scheduler/utils.py +0 -0
  25. {datatailr-0.1.11 → datatailr-0.1.12}/src/datatailr/user.py +0 -0
  26. {datatailr-0.1.11 → datatailr-0.1.12}/src/datatailr/utils.py +0 -0
  27. {datatailr-0.1.11 → datatailr-0.1.12}/src/datatailr/version.py +0 -0
  28. {datatailr-0.1.11 → datatailr-0.1.12}/src/datatailr/wrapper.py +0 -0
  29. {datatailr-0.1.11 → datatailr-0.1.12}/src/datatailr.egg-info/SOURCES.txt +0 -0
  30. {datatailr-0.1.11 → datatailr-0.1.12}/src/datatailr.egg-info/dependency_links.txt +0 -0
  31. {datatailr-0.1.11 → datatailr-0.1.12}/src/datatailr.egg-info/entry_points.txt +0 -0
  32. {datatailr-0.1.11 → datatailr-0.1.12}/src/datatailr.egg-info/requires.txt +0 -0
  33. {datatailr-0.1.11 → datatailr-0.1.12}/src/datatailr.egg-info/top_level.txt +0 -0
  34. {datatailr-0.1.11 → datatailr-0.1.12}/src/sbin/datatailr_run.py +0 -0
  35. {datatailr-0.1.11 → datatailr-0.1.12}/src/sbin/datatailr_run_app.py +0 -0
  36. {datatailr-0.1.11 → datatailr-0.1.12}/src/sbin/datatailr_run_batch.py +0 -0
  37. {datatailr-0.1.11 → datatailr-0.1.12}/src/sbin/datatailr_run_excel.py +0 -0
  38. {datatailr-0.1.11 → datatailr-0.1.12}/src/sbin/datatailr_run_service.py +0 -0
{datatailr-0.1.11/src/datatailr.egg-info → datatailr-0.1.12}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: datatailr
-Version: 0.1.11
+Version: 0.1.12
 Summary: Ready-to-Use Platform That Drives Business Insights
 Author-email: Datatailr <info@datatailr.com>
 License-Expression: MIT
@@ -108,7 +108,7 @@ Since this is a local run then the execution of each node will happen sequential
 
 To take advantage of the datatailr platform and execute the graph at scale, you can run it using the job scheduler as presented in the next section.
 
-### Execution at Scale
+## Execution at Scale
 To execute the graph at scale, you can use the Datatailr job scheduler. This allows you to run your jobs in parallel, taking advantage of the underlying infrastructure.
 
 You will first need to separate your function definitions from the DAG definition. This means you should define your functions as a separate module, which can be imported into the DAG definition.
{datatailr-0.1.11 → datatailr-0.1.12}/README.md

@@ -71,7 +71,7 @@ Since this is a local run then the execution of each node will happen sequential
 
 To take advantage of the datatailr platform and execute the graph at scale, you can run it using the job scheduler as presented in the next section.
 
-### Execution at Scale
+## Execution at Scale
 To execute the graph at scale, you can use the Datatailr job scheduler. This allows you to run your jobs in parallel, taking advantage of the underlying infrastructure.
 
 You will first need to separate your function definitions from the DAG definition. This means you should define your functions as a separate module, which can be imported into the DAG definition.
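
An aside on the README passage above: the layout it describes (task functions kept in their own module, imported by the DAG definition) can be sketched in plain Python as follows. This is an illustration only; the file names (tasks.py, dag.py) and function names (load, transform, report) are hypothetical, and the sketch deliberately avoids guessing at the actual datatailr scheduler API.

    # tasks.py -- hypothetical module holding only the function definitions,
    # so they can be imported by the DAG definition (and by the scheduler)
    # instead of being defined inline.

    def load() -> list:
        """Produce the raw input data."""
        return [1, 2, 3]

    def transform(data: list) -> list:
        """Derive new values from the loaded data."""
        return [x * 2 for x in data]

    def report(data: list) -> int:
        """Aggregate the transformed data into a single result."""
        return sum(data)

    # dag.py -- hypothetical DAG definition that imports the functions from
    # the separate module rather than defining them alongside the graph.
    from tasks import load, transform, report

    if __name__ == "__main__":
        # A local run executes each node sequentially; per the README, the
        # Datatailr job scheduler is what runs these nodes in parallel at scale.
        print(report(transform(load())))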
{datatailr-0.1.11 → datatailr-0.1.12}/pyproject.toml

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "datatailr"
-version = "0.1.11"
+version = "0.1.12"
 description = "Ready-to-Use Platform That Drives Business Insights"
 readme = "README.md"
 requires-python = ">=3.9"
{datatailr-0.1.11 → datatailr-0.1.12/src/datatailr.egg-info}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: datatailr
-Version: 0.1.11
+Version: 0.1.12
 Summary: Ready-to-Use Platform That Drives Business Insights
 Author-email: Datatailr <info@datatailr.com>
 License-Expression: MIT
@@ -108,7 +108,7 @@ Since this is a local run then the execution of each node will happen sequential
 
 To take advantage of the datatailr platform and execute the graph at scale, you can run it using the job scheduler as presented in the next section.
 
-### Execution at Scale
+## Execution at Scale
 To execute the graph at scale, you can use the Datatailr job scheduler. This allows you to run your jobs in parallel, taking advantage of the underlying infrastructure.
 
 You will first need to separate your function definitions from the DAG definition. This means you should define your functions as a separate module, which can be imported into the DAG definition.