jerry-thomas 2.0.0__py3-none-any.whl → 2.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- datapipeline/cli/commands/stream.py +14 -8
- datapipeline/pipeline/stages.py +12 -1
- datapipeline/services/paths.py +10 -1
- datapipeline/services/scaffold/stream_plan.py +19 -0
- datapipeline/templates/plugin_skeleton/your-dataset/project.yaml +2 -1
- datapipeline/templates/plugin_skeleton/your-interim-data-builder/project.yaml +2 -1
- datapipeline/templates/stubs/dto.py.j2 +1 -1
- datapipeline/templates/stubs/filter.py.j2 +1 -1
- datapipeline/templates/stubs/mappers/ingest.py.j2 +3 -0
- datapipeline/templates/stubs/parser.py.j2 +1 -1
- datapipeline/templates/stubs/record.py.j2 +1 -0
- {jerry_thomas-2.0.0.dist-info → jerry_thomas-2.0.1.dist-info}/METADATA +7 -20
- {jerry_thomas-2.0.0.dist-info → jerry_thomas-2.0.1.dist-info}/RECORD +17 -17
- {jerry_thomas-2.0.0.dist-info → jerry_thomas-2.0.1.dist-info}/WHEEL +0 -0
- {jerry_thomas-2.0.0.dist-info → jerry_thomas-2.0.1.dist-info}/entry_points.txt +0 -0
- {jerry_thomas-2.0.0.dist-info → jerry_thomas-2.0.1.dist-info}/licenses/LICENSE +0 -0
- {jerry_thomas-2.0.0.dist-info → jerry_thomas-2.0.1.dist-info}/top_level.txt +0 -0
datapipeline/cli/commands/stream.py CHANGED

@@ -39,11 +39,12 @@ from datapipeline.services.scaffold.utils import (
 
 def handle(*, plugin_root: Path | None = None, workspace: WorkspaceContext | None = None) -> None:
     root_dir, pkg_name, _ = pkg_root(plugin_root)
-    project_yaml = resolve_default_project_yaml(workspace) or resolve_project_yaml_path(root_dir)
+    project_yaml = resolve_default_project_yaml(
+        workspace) or resolve_project_yaml_path(root_dir)
 
     # Shared context
-    provider = prompt_required("Provider")
-    dataset = prompt_required("Dataset")
+    provider = prompt_required("Provider name (e.g. nasa)")
+    dataset = prompt_required("Dataset name (e.g. weather)")
     source_id = f"{provider}.{dataset}"
 
     # Collected actions (execute at end)
@@ -110,7 +111,8 @@ def handle(*, plugin_root: Path | None = None, workspace: WorkspaceContext | Non
     ]
     if choice == "fs":
         fmt_options.append(("pickle", "pickle"))
-        fmt = pick_from_menu("Format:", fmt_options, allow_default=False)
+        fmt = pick_from_menu(
+            "Format:", fmt_options, allow_default=False)
     else:
         fmt = None
     loader_ep, loader_args = default_loader_config(choice, fmt)
@@ -142,7 +144,8 @@ def handle(*, plugin_root: Path | None = None, workspace: WorkspaceContext | Non
             [(k, k) for k in sorted(parsers.keys())],
         )
     elif pchoice == "create":
-        dto_default = dto_class_name(f"{provider}_{dataset}") if provider and dataset else None
+        dto_default = dto_class_name(
+            f"{provider}_{dataset}") if provider and dataset else None
         dto_class, parser_create_dto = choose_existing_or_create_name(
             label=LABEL_DTO_FOR_PARSER,
             existing=sorted(list_dtos(root=plugin_root).keys()),
@@ -228,7 +231,8 @@ def handle(*, plugin_root: Path | None = None, workspace: WorkspaceContext | Non
                 default=default_mapper_name_for_identity(domain),
             )
         else:
-            mapper_name = choose_name("Mapper name", default=default_mapper_name(mapper_input_module, domain))
+            mapper_name = choose_name(
+                "Mapper name", default=default_mapper_name(mapper_input_module, domain))
     elif mchoice == "identity":
         mapper_ep = "identity"
     else:
@@ -264,9 +268,11 @@ def handle(*, plugin_root: Path | None = None, workspace: WorkspaceContext | Non
             domain=domain,
         )
     elif mchoice == "existing":
-        mapper_plan = MapperPlan(create=False, mapper_ep=mapper_ep, domain=domain)
+        mapper_plan = MapperPlan(
+            create=False, mapper_ep=mapper_ep, domain=domain)
     else:
-        mapper_plan = MapperPlan(create=False, mapper_ep="identity", domain=domain)
+        mapper_plan = MapperPlan(
+            create=False, mapper_ep="identity", domain=domain)
 
     plan = StreamPlan(
         provider=provider,
datapipeline/pipeline/stages.py CHANGED

@@ -25,7 +25,18 @@ from datapipeline.transforms.utils import get_field, partition_key
 
 def open_source_stream(context: PipelineContext, stream_alias: str) -> Source:
     runtime = context.runtime
-
+    registry = runtime.registries.stream_sources
+    try:
+        source = registry.get(stream_alias)
+    except KeyError as exc:
+        available = sorted(registry.keys())
+        available_text = ", ".join(available) if available else "(none)"
+        raise KeyError(
+            "Unknown record_stream "
+            f"'{stream_alias}'. Check dataset.yaml and contracts/ ids. "
+            f"Available streams: {available_text}"
+        ) from exc
+    return source.stream()
 
 
 def build_record_stream(
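
The practical effect of this stages.py change is that a typo in `record_stream` now fails with a message listing the registered stream aliases instead of a bare `KeyError`. Below is a minimal sketch of the same pattern, with a plain dict standing in for `runtime.registries.stream_sources`; the real registry type and its `get()` semantics are not shown in this diff, so the dict lookup here is only an assumption:

```python
from typing import Dict

# Hypothetical stand-in for runtime.registries.stream_sources.
stream_sources: Dict[str, object] = {"nasa.weather": object()}


def open_source_stream(stream_alias: str) -> object:
    try:
        source = stream_sources[stream_alias]  # dict raises KeyError on a miss
    except KeyError as exc:
        available = sorted(stream_sources)
        available_text = ", ".join(available) if available else "(none)"
        raise KeyError(
            f"Unknown record_stream '{stream_alias}'. "
            "Check dataset.yaml and contracts/ ids. "
            f"Available streams: {available_text}"
        ) from exc
    return source


try:
    open_source_stream("nasa.wether")  # deliberate typo
except KeyError as err:
    print(err)  # lists the known aliases instead of failing silently
```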
datapipeline/services/paths.py CHANGED

@@ -8,7 +8,16 @@ def pkg_root(start: Optional[Path] = None) -> tuple[Path, str, Path]:
     for d in [here, *here.parents]:
         pyproject = d / "pyproject.toml"
         if pyproject.exists():
-
+            pkg_name = d.name
+            src_dir = d / "src"
+            if src_dir.exists():
+                candidates = [
+                    p for p in src_dir.iterdir()
+                    if p.is_dir() and (p / "__init__.py").exists()
+                ]
+                if len(candidates) == 1:
+                    pkg_name = candidates[0].name
+            return d, pkg_name, pyproject
     print("[error] pyproject.toml not found (searched current and parent dirs)", file=sys.stderr)
     raise SystemExit(1)
 
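
The `pkg_root` change stops assuming the import package is named after the project directory: when the project uses a `src/` layout with exactly one package containing an `__init__.py`, that package's name wins. A standalone sketch of the heuristic, using made-up `my-datapipeline`/`my_datapipeline` names for illustration:

```python
from pathlib import Path
import tempfile


def guess_package_name(project_dir: Path) -> str:
    """Prefer the single importable package under src/, else the directory name."""
    pkg_name = project_dir.name
    src_dir = project_dir / "src"
    if src_dir.exists():
        candidates = [
            p for p in src_dir.iterdir()
            if p.is_dir() and (p / "__init__.py").exists()
        ]
        if len(candidates) == 1:
            pkg_name = candidates[0].name
    return pkg_name


# Hypothetical layout: my-datapipeline/src/my_datapipeline/__init__.py
with tempfile.TemporaryDirectory() as tmp:
    root = Path(tmp) / "my-datapipeline"
    (root / "src" / "my_datapipeline").mkdir(parents=True)
    (root / "src" / "my_datapipeline" / "__init__.py").touch()
    print(guess_package_name(root))  # -> "my_datapipeline", not "my-datapipeline"
```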
datapipeline/services/scaffold/stream_plan.py CHANGED

@@ -8,6 +8,7 @@ from datapipeline.services.scaffold.mapper import create_mapper
 from datapipeline.services.scaffold.source_yaml import create_source_yaml
 from datapipeline.services.scaffold.contract_yaml import write_ingest_contract
 from datapipeline.services.scaffold.discovery import list_dtos
+from datapipeline.services.paths import pkg_root
 from datapipeline.services.scaffold.utils import error_exit, status
 
 
@@ -50,6 +51,17 @@ class StreamPlan:
 
 
 def execute_stream_plan(plan: StreamPlan) -> None:
+    pyproject_path = None
+    before_pyproject = None
+    try:
+        root_dir, _, pyproject = pkg_root(plan.root)
+        pyproject_path = pyproject
+        if pyproject_path.exists():
+            before_pyproject = pyproject_path.read_text()
+    except SystemExit:
+        pyproject_path = None
+        before_pyproject = None
+
     if plan.create_domain and plan.domain:
         create_domain(domain=plan.domain, root=plan.root)
 
@@ -108,3 +120,10 @@ def execute_stream_plan(plan: StreamPlan) -> None:
         mapper_entrypoint=mapper_ep or "identity",
     )
     status("ok", "Stream created.")
+    if pyproject_path and before_pyproject is not None:
+        after_pyproject = pyproject_path.read_text()
+        if after_pyproject != before_pyproject:
+            status(
+                "note",
+                f"Entry points updated; reinstall plugin: pip install -e {pyproject_path.parent}",
+            )
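
The snapshot-and-compare added around `execute_stream_plan` exists only to notice when scaffolding rewrote `pyproject.toml` (typically its entry points) and to remind the user to reinstall the editable plugin. A reduced sketch of that before/after check; the `run_with_reinstall_hint` wrapper is hypothetical and `status()` is stubbed in place of the scaffold utilities:

```python
from pathlib import Path
from typing import Callable


def status(kind: str, message: str) -> None:
    # Stand-in for datapipeline.services.scaffold.utils.status
    print(f"[{kind}] {message}")


def run_with_reinstall_hint(pyproject_path: Path, execute: Callable[[], None]) -> None:
    """Snapshot pyproject.toml, run a scaffolding step, and nudge a reinstall if it changed."""
    before = pyproject_path.read_text() if pyproject_path.exists() else None
    execute()
    if (
        before is not None
        and pyproject_path.exists()
        and pyproject_path.read_text() != before
    ):
        status(
            "note",
            f"Entry points updated; reinstall plugin: pip install -e {pyproject_path.parent}",
        )
```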
datapipeline/templates/stubs/mappers/ingest.py.j2 CHANGED

@@ -10,6 +10,9 @@ def {{FUNCTION_NAME}}(
     **params: Any,
 ) -> Iterator[{{DOMAIN_RECORD}}]:
     """Map {{INPUT_CLASS}} records to domain-level {{DOMAIN_RECORD}} records."""
+    raise NotImplementedError(
+        "Implement mapper logic for {{INPUT_CLASS}} -> {{DOMAIN_RECORD}}"
+    )
     for record in stream:
         # TODO: construct {{DOMAIN_RECORD}} from record fields
         yield {{DOMAIN_RECORD}}(
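
With the added `raise NotImplementedError`, a freshly scaffolded ingest mapper fails loudly until its TODO body is filled in, rather than silently yielding half-built records. Roughly what a completed mapper could look like once the raise is removed; the `WeatherDto`/`WeatherRecord` types and their fields below are invented for illustration and are not part of the template:

```python
from dataclasses import dataclass
from datetime import datetime, timezone
from typing import Any, Iterator


@dataclass
class WeatherDto:        # hypothetical {{INPUT_CLASS}}
    time: str
    temp_c: float


@dataclass
class WeatherRecord:     # hypothetical {{DOMAIN_RECORD}}
    time: datetime
    temperature: float


def map_weather(stream: Iterator[WeatherDto], **params: Any) -> Iterator[WeatherRecord]:
    """Map WeatherDto records to domain-level WeatherRecord records."""
    for record in stream:
        yield WeatherRecord(
            # Assume incoming timestamps are UTC; make them timezone-aware.
            time=datetime.fromisoformat(record.time).replace(tzinfo=timezone.utc),
            temperature=record.temp_c,
        )


for rec in map_weather(iter([WeatherDto(time="2024-01-01T12:00:00", temp_c=3.5)])):
    print(rec)
```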
datapipeline/templates/stubs/record.py.j2 CHANGED

@@ -13,6 +13,7 @@ class {{CLASS_NAME}}({{PARENT_CLASS}}):
 
     Add any additional fields you need for filtering/partitioning/grouping.
     """
+    # TODO: Add domain fields for filtering/partitioning/grouping.
     # Example extra fields (uncomment and adapt):
     # region: str    # e.g. 'us-west', 'eu-central', etc.
     # exchange: str  # e.g. 'NASDAQ', 'NYSE', etc.
{jerry_thomas-2.0.0.dist-info → jerry_thomas-2.0.1.dist-info}/METADATA CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: jerry-thomas
-Version: 2.0.0
+Version: 2.0.1
 Summary: Jerry-Thomas: a stream-first, plugin-friendly data pipeline (mixology-themed CLI)
 Author: Anders Skott Lind
 License: MIT
@@ -75,32 +75,19 @@ For example: `jerry serve --project lib/my-datapipeline/project.yaml`.
 
 ```bash
 jerry plugin init my-datapipeline --out lib/
-
+
+# Note: import paths use the package name (hyphens become underscores), e.g.
+# `my_datapipeline` even if the dist folder is `my-datapipeline`.
 
 # One-stop wizard: scaffolds source YAML + DTO/parser + domain + mapper + contract.
+# See `docs/cli.md` for wizard tips and identity vs custom guidance.
 jerry inflow create
 
-# Wizard tips (identity vs custom)
-#
-# - Parser:
-#   - Choose "Identity parser" when the loader already yields dicts/objects that match your DTO shape,
-#     and no type conversion is needed.
-#   - Choose "Create new parser" when you need to parse timestamps, coerce types, rename fields,
-#     or drop/validate rows.
-#
-# - Mapper:
-#   - Choose "Identity mapper" only when your DTO already is the final domain record shape (for example you might have used jerry to output interim datasets):
-#     `time` is timezone-aware and identity fields are present.
-#   - Otherwise, choose "Create new mapper" to map DTO -> domain record and add light derived fields.
-#
-# After scaffolding, you typically still need to:
-#   - Fill placeholders in `sources/*.yaml` (path/url/headers/etc.).
-#   - Reference your stream contract id in `dataset.yaml` under `record_stream: <contract_id>` and pick a `field` for each feature.
-#
 # Reinstall after commands that update entry points (pyproject.toml).
 python -m pip install -e lib/my-datapipeline
 
-
+# -> fill in your templates generated by 'jerry inflow create' and get ready to serve
+jerry serve --limit 3
 ```
 
 ---
{jerry_thomas-2.0.0.dist-info → jerry_thomas-2.0.1.dist-info}/RECORD CHANGED

@@ -32,7 +32,7 @@ datapipeline/cli/commands/run.py,sha256=TmbyggYOlF972oxwLhh-r27ggeWARg0_WfCMQJAu
 datapipeline/cli/commands/run_config.py,sha256=Ix4N-Zn-leb6MaQ7A0cZfVu9-yOaNme5rg5qR0rWXZ4,3158
 datapipeline/cli/commands/serve_pipeline.py,sha256=9kxdc2D1qtDy6L-8ron-UuHc9qTng6xhZ43l00JqoEU,5004
 datapipeline/cli/commands/source.py,sha256=0mhzRT7J2Nu7HKNORsvoaEBVOBP9CmN1L1_JVNXV0q4,5904
-datapipeline/cli/commands/stream.py,sha256=
+datapipeline/cli/commands/stream.py,sha256=30hYLjoRV4xdyFbK146eXnbhMDC1p8HeXQKZ8TmcCTg,9835
 datapipeline/cli/visuals/__init__.py,sha256=CUxCoMoU96FQonq6V_i_HBUwuwoWjML5X-_MZDF_i8M,371
 datapipeline/cli/visuals/common.py,sha256=NFyYzJnWEpP6vuhRXl6LRBbGTMk_12Ya1ckBK7kzpls,10298
 datapipeline/cli/visuals/labels.py,sha256=rfilkKeTna25ZnSw2UlQ7apMK0DeDvTIRUCkaAOkM_I,1748
@@ -91,7 +91,7 @@ datapipeline/pipeline/context.py,sha256=-W8QvGm32QGmBziEuzl-BitscuxGPb9bgQYDhRC1
 datapipeline/pipeline/observability.py,sha256=BMuQazS9pKE559ew3ENsku03P8Fd79yA5ofSxHnF2xo,2007
 datapipeline/pipeline/pipelines.py,sha256=lHcNb5n3l6OYgPOsdQnZJha6eHzSIDjUmV4HJQ0jEZc,6807
 datapipeline/pipeline/split.py,sha256=TCzOhd8PF81IcUzUdPSz0hs3pIHi9V4IhXbSY2ZHK3Q,6090
-datapipeline/pipeline/stages.py,sha256=
+datapipeline/pipeline/stages.py,sha256=w_5Q6L2qpzBlHVeO9nWc3625jRgipwW4Ldl0PylmF8I,10816
 datapipeline/pipeline/utils/keygen.py,sha256=v2JJagJAE9iYfLtbl4uxoAEXZN_ALH0xdHhPDhNfKwU,1909
 datapipeline/pipeline/utils/memory_sort.py,sha256=hS61n2CeIITRqffE1ftvn2IdqQp1IXYhuN4MJqncKvk,1155
 datapipeline/pipeline/utils/ordering.py,sha256=ZX18I7GFtvyMFJB96vWQpTOGwljjeJ6ppCg5a3Av3es,1450
@@ -102,7 +102,7 @@ datapipeline/services/artifacts.py,sha256=Ujc41F44_zcc6ndU3nXYXCQ-pWpNtBwNH5F2gX
 datapipeline/services/constants.py,sha256=qdfIb1uc_ewWLtzDAGR0YvcUAqFCxb8IRoyzbp_j8ZI,570
 datapipeline/services/entrypoints.py,sha256=NKcSbhGRtBLQXGf-TdujwbVSRH1zb5J-S2jxFPnk6HQ,2504
 datapipeline/services/factories.py,sha256=WTEFiQ_IJaSG-yTLyi1CpQOrP_LoUEDpco6zYygjIxk,5868
-datapipeline/services/paths.py,sha256=
+datapipeline/services/paths.py,sha256=3wydhJoyFzVAGi_DnF1xjPuF7rECk9rDJzEi2e5_55c,1319
 datapipeline/services/project_paths.py,sha256=_ThKuXxh8TJFoChm6zL4s3qDsIihMmQy4FM0Pm6wW9E,4228
 datapipeline/services/runs.py,sha256=9HGbJYFtFt_on1F5nTKoxvK7NCYxz3a4xos1TLJfBXg,6149
 datapipeline/services/bootstrap/__init__.py,sha256=Mc2w2S69kU1hnzCvsGMhFqyNoNMXPwQtxprAkGN-sYE,245
@@ -121,7 +121,7 @@ datapipeline/services/scaffold/mapper.py,sha256=-HmX-MvgOdCui31wLRUCu_gyo2JbAjBX
 datapipeline/services/scaffold/parser.py,sha256=Fi5nsjYUSwgf4RR85iv2cdbhFfg6okOFaW0ysvWgJ6c,1545
 datapipeline/services/scaffold/plugin.py,sha256=0RC9C3m2IZsUm2hy85W1ce4MstR63x64imEZ7gv5FDo,3845
 datapipeline/services/scaffold/source_yaml.py,sha256=mMLeVsY7SNefFP32d5eXxUGKwIwLzSMNvm0faWgUSKw,2849
-datapipeline/services/scaffold/stream_plan.py,sha256=
+datapipeline/services/scaffold/stream_plan.py,sha256=fKAKLtOrlitACVDDlVpKCeTxnG1VlQlLrah1LVMeWl8,4496
 datapipeline/services/scaffold/templates.py,sha256=B3YnZpFUZLynijJosTNxZQLXnPP_Y_t1RHqfI1lGOxU,634
 datapipeline/services/scaffold/utils.py,sha256=y6drKUjU1rgVXHfMBashrpgfcKUEw620O43hDF3WOg0,5154
 datapipeline/sources/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -197,7 +197,7 @@ datapipeline/templates/plugin_skeleton/src/{{PACKAGE_NAME}}/mappers/__init__.py,
 datapipeline/templates/plugin_skeleton/src/{{PACKAGE_NAME}}/parsers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 datapipeline/templates/plugin_skeleton/your-dataset/dataset.yaml,sha256=js877ylqGFJBS7adzGVpFsXFVzofhEWLnuyKgF_YnwY,408
 datapipeline/templates/plugin_skeleton/your-dataset/postprocess.yaml,sha256=MFbgkHZ-MYKk_hYoEARE5ot9dVVb1w4D-jIvTYW8fxs,337
-datapipeline/templates/plugin_skeleton/your-dataset/project.yaml,sha256=
+datapipeline/templates/plugin_skeleton/your-dataset/project.yaml,sha256=zt1QIkgNij546vocQnOzmOaCwPapLuIfv_mFnaCY4-U,497
 datapipeline/templates/plugin_skeleton/your-dataset/tasks/metadata.yaml,sha256=bCjlBbQNhRMjH9XuFXqdhMjQLRFqyFnKeSITHCNL9og,95
 datapipeline/templates/plugin_skeleton/your-dataset/tasks/scaler.yaml,sha256=E4WJTjAbZ4cC4lhADGVft8YxwiiaRWzl3FeaNJ0_mAo,110
 datapipeline/templates/plugin_skeleton/your-dataset/tasks/schema.yaml,sha256=nqZPeWCpONnTXR2wOgcPz9EFr8V5r4pSg318XRfpZuM,91
@@ -206,20 +206,20 @@ datapipeline/templates/plugin_skeleton/your-dataset/tasks/serve.train.yaml,sha25
 datapipeline/templates/plugin_skeleton/your-dataset/tasks/serve.val.yaml,sha256=8KSHCzSBN0-_YZocSR8gNi0zYCNZnEUAOj4st5GOvUs,109
 datapipeline/templates/plugin_skeleton/your-interim-data-builder/dataset.yaml,sha256=3VQn-1aox7dh7lE648m2z8isoqVl9n-Vq35xfNRLwtM,239
 datapipeline/templates/plugin_skeleton/your-interim-data-builder/postprocess.yaml,sha256=N1F-Xz3GaBn2H1p7uKzhkhKCQV8QVR0t76XD6wmFtXA,3
-datapipeline/templates/plugin_skeleton/your-interim-data-builder/project.yaml,sha256=
+datapipeline/templates/plugin_skeleton/your-interim-data-builder/project.yaml,sha256=nGzWBDlLhSGX_Nn7KKc55DM3NxNmWCPwQ1aRV0Pe2Dw,410
 datapipeline/templates/plugin_skeleton/your-interim-data-builder/tasks/serve.all.yaml,sha256=TdqwfDpGn-byv23d0pQV_GD38hTIzUe0G2Z1ffwSR0Y,187
-datapipeline/templates/stubs/dto.py.j2,sha256=
-datapipeline/templates/stubs/filter.py.j2,sha256=
+datapipeline/templates/stubs/dto.py.j2,sha256=iayNfcsfkdL5NE58erk8LL25Kf6FMlHzbdfYbgoehCA,841
+datapipeline/templates/stubs/filter.py.j2,sha256=OE_HT4uxC8Fl928tuWwDZ-_QjLXNXWhVUq42DWkqaBc,504
 datapipeline/templates/stubs/loader_synthetic.py.j2,sha256=9SQBeTBGlZmKs6nSYBKd8nbOPcFHgDx17Mh8xOEQnvs,1285
-datapipeline/templates/stubs/parser.py.j2,sha256=
+datapipeline/templates/stubs/parser.py.j2,sha256=LiTjv_CzdnE_M7_6Q7adVFIpX8-Vyg-ogM9vrPxHQNs,804
 datapipeline/templates/stubs/parser_custom.py.j2,sha256=0Nytq43JdTZoyRj-4Mz6HWdMTmOP3VlFuYOB_A_13Vg,580
-datapipeline/templates/stubs/record.py.j2,sha256=
+datapipeline/templates/stubs/record.py.j2,sha256=xB0KicFZTjU-pzKiuwV4O1QBSNPvHnehwaKPnYdwAZo,637
 datapipeline/templates/stubs/source.yaml.j2,sha256=iMWN915cJfz-KzvEqHQgFdkWxf7kdTsPzClb2JvXE8Q,420
 datapipeline/templates/stubs/contracts/composed.yaml.j2,sha256=n-t4w-eziZP6vVBVI_TwImSJWzdVJxQch9-OdAvPpuI,285
 datapipeline/templates/stubs/contracts/ingest.yaml.j2,sha256=P6GoItw_lSVnadPE8Tvp6SwAgjCbH7wkD87Jhnl1OcY,1045
 datapipeline/templates/stubs/loaders/basic.py.j2,sha256=fj9gm9iSzCxwDB6FLlN68G5AR-vm2dRJqLYCLFXW6Wo,303
 datapipeline/templates/stubs/mappers/composed.py.j2,sha256=fWm2xNUZ_KFGJQc0izF2zJ9JZ8o4L1TDJtMI05KqKjc,522
-datapipeline/templates/stubs/mappers/ingest.py.j2,sha256=
+datapipeline/templates/stubs/mappers/ingest.py.j2,sha256=6DR35on0VkyaGm3yWpQXUK7MaaPuYnnRyPSbbFjiiB4,655
 datapipeline/transforms/filter.py,sha256=6BEwEy3_vlQ0mU00MkYkpgDVZrgUXSDcQ--BZANlNUw,2867
 datapipeline/transforms/interfaces.py,sha256=rbq5vosNOCMqXOYyPrOX2PZVV-kmnwcaRMJUKANG9mk,1617
 datapipeline/transforms/sequence.py,sha256=mhj5qkD2nUtZ2Kkfrm9ogYh8Mlmv3fDKf-aphB4-gOo,1639
@@ -256,9 +256,9 @@ datapipeline/utils/placeholders.py,sha256=SAR8G37DFesznu4T87lD9Bvut5aCmriZBQNdfu
 datapipeline/utils/rich_compat.py,sha256=GwQGoHEXiX8wq-crYMXdDqbkKreHQnFK-6E8e8FYaCU,1129
 datapipeline/utils/time.py,sha256=vOqa2arqwEqbDo-JWEhOFPMnI1E4Ib3i1L-Rt-cGH8c,1072
 datapipeline/utils/window.py,sha256=g8hR_7IiLKVywlpPK2xhHx9QTOlNMiQuJrTL96Zs8gg,2540
-jerry_thomas-2.0.
-jerry_thomas-2.0.
-jerry_thomas-2.0.
-jerry_thomas-2.0.
-jerry_thomas-2.0.
-jerry_thomas-2.0.
+jerry_thomas-2.0.1.dist-info/licenses/LICENSE,sha256=pkBMylAJF5yChHAkdxwFhEptLGx13i-XFEKh-Sh6DkM,1073
+jerry_thomas-2.0.1.dist-info/METADATA,sha256=n9xxlQzghlj5OhtSTDhRNnBoCkFhOaePFm28DnvPZTU,13554
+jerry_thomas-2.0.1.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+jerry_thomas-2.0.1.dist-info/entry_points.txt,sha256=BCvH4YYRe4ts9VSYjgm4x1-2CpKCkLLVKOD5ZU0AQzU,2020
+jerry_thomas-2.0.1.dist-info/top_level.txt,sha256=N8aoNPdPyHefODO4YAm7tqTaUcw0e8LDcqycFTf8TbM,13
+jerry_thomas-2.0.1.dist-info/RECORD,,
{jerry_thomas-2.0.0.dist-info → jerry_thomas-2.0.1.dist-info}/WHEEL: File without changes
{jerry_thomas-2.0.0.dist-info → jerry_thomas-2.0.1.dist-info}/entry_points.txt: File without changes
{jerry_thomas-2.0.0.dist-info → jerry_thomas-2.0.1.dist-info}/licenses/LICENSE: File without changes
{jerry_thomas-2.0.0.dist-info → jerry_thomas-2.0.1.dist-info}/top_level.txt: File without changes