salesforce-data-customcode 0.1.0.tar.gz → 0.1.2.tar.gz

This diff represents the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in the public registry.
Files changed (36)
  1. {salesforce_data_customcode-0.1.0 → salesforce_data_customcode-0.1.2}/PKG-INFO +1 -1
  2. {salesforce_data_customcode-0.1.0 → salesforce_data_customcode-0.1.2}/pyproject.toml +4 -7
  3. {salesforce_data_customcode-0.1.0 → salesforce_data_customcode-0.1.2}/src/datacustomcode/cli.py +13 -1
  4. {salesforce_data_customcode-0.1.0 → salesforce_data_customcode-0.1.2}/src/datacustomcode/deploy.py +57 -78
  5. {salesforce_data_customcode-0.1.0 → salesforce_data_customcode-0.1.2}/src/datacustomcode/scan.py +21 -12
  6. {salesforce_data_customcode-0.1.0 → salesforce_data_customcode-0.1.2}/src/datacustomcode/templates/Dockerfile +2 -2
  7. salesforce_data_customcode-0.1.2/src/datacustomcode/templates/payload/entrypoint.py +25 -0
  8. salesforce_data_customcode-0.1.2/src/datacustomcode/version.py +27 -0
  9. salesforce_data_customcode-0.1.0/src/datacustomcode/templates/payload/entrypoint.py +0 -10
  10. {salesforce_data_customcode-0.1.0 → salesforce_data_customcode-0.1.2}/LICENSE.txt +0 -0
  11. {salesforce_data_customcode-0.1.0 → salesforce_data_customcode-0.1.2}/README.md +0 -0
  12. {salesforce_data_customcode-0.1.0 → salesforce_data_customcode-0.1.2}/src/datacustomcode/__init__.py +0 -0
  13. {salesforce_data_customcode-0.1.0 → salesforce_data_customcode-0.1.2}/src/datacustomcode/client.py +0 -0
  14. {salesforce_data_customcode-0.1.0 → salesforce_data_customcode-0.1.2}/src/datacustomcode/cmd.py +0 -0
  15. {salesforce_data_customcode-0.1.0 → salesforce_data_customcode-0.1.2}/src/datacustomcode/config.py +0 -0
  16. {salesforce_data_customcode-0.1.0 → salesforce_data_customcode-0.1.2}/src/datacustomcode/config.yaml +0 -0
  17. {salesforce_data_customcode-0.1.0 → salesforce_data_customcode-0.1.2}/src/datacustomcode/credentials.py +0 -0
  18. {salesforce_data_customcode-0.1.0 → salesforce_data_customcode-0.1.2}/src/datacustomcode/io/__init__.py +0 -0
  19. {salesforce_data_customcode-0.1.0 → salesforce_data_customcode-0.1.2}/src/datacustomcode/io/base.py +0 -0
  20. {salesforce_data_customcode-0.1.0 → salesforce_data_customcode-0.1.2}/src/datacustomcode/io/reader/__init__.py +0 -0
  21. {salesforce_data_customcode-0.1.0 → salesforce_data_customcode-0.1.2}/src/datacustomcode/io/reader/base.py +0 -0
  22. {salesforce_data_customcode-0.1.0 → salesforce_data_customcode-0.1.2}/src/datacustomcode/io/reader/query_api.py +0 -0
  23. {salesforce_data_customcode-0.1.0 → salesforce_data_customcode-0.1.2}/src/datacustomcode/io/writer/__init__.py +0 -0
  24. {salesforce_data_customcode-0.1.0 → salesforce_data_customcode-0.1.2}/src/datacustomcode/io/writer/base.py +0 -0
  25. {salesforce_data_customcode-0.1.0 → salesforce_data_customcode-0.1.2}/src/datacustomcode/io/writer/csv.py +0 -0
  26. {salesforce_data_customcode-0.1.0 → salesforce_data_customcode-0.1.2}/src/datacustomcode/io/writer/print.py +0 -0
  27. {salesforce_data_customcode-0.1.0 → salesforce_data_customcode-0.1.2}/src/datacustomcode/mixin.py +0 -0
  28. {salesforce_data_customcode-0.1.0 → salesforce_data_customcode-0.1.2}/src/datacustomcode/py.typed +0 -0
  29. {salesforce_data_customcode-0.1.0 → salesforce_data_customcode-0.1.2}/src/datacustomcode/run.py +0 -0
  30. {salesforce_data_customcode-0.1.0 → salesforce_data_customcode-0.1.2}/src/datacustomcode/template.py +0 -0
  31. {salesforce_data_customcode-0.1.0 → salesforce_data_customcode-0.1.2}/src/datacustomcode/templates/.devcontainer/devcontainer.json +0 -0
  32. {salesforce_data_customcode-0.1.0 → salesforce_data_customcode-0.1.2}/src/datacustomcode/templates/README.md +0 -0
  33. {salesforce_data_customcode-0.1.0 → salesforce_data_customcode-0.1.2}/src/datacustomcode/templates/jupyterlab.sh +0 -0
  34. {salesforce_data_customcode-0.1.0 → salesforce_data_customcode-0.1.2}/src/datacustomcode/templates/payload/config.json +0 -0
  35. {salesforce_data_customcode-0.1.0 → salesforce_data_customcode-0.1.2}/src/datacustomcode/templates/requirements-dev.txt +0 -0
  36. {salesforce_data_customcode-0.1.0 → salesforce_data_customcode-0.1.2}/src/datacustomcode/templates/requirements.txt +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: salesforce-data-customcode
-Version: 0.1.0
+Version: 0.1.2
 Summary: Data Cloud Custom Code SDK
 License: Apache-2.0
 Requires-Python: >=3.10,<3.12
pyproject.toml
@@ -1,9 +1,6 @@
 [build-system]
 build-backend = "poetry_dynamic_versioning.backend"
-requires = [
-  "poetry-core>=2.0.0",
-  "poetry-dynamic-versioning>=1.0.0,<2.0.0"
-]
+requires = ["poetry-core>=2.0.0", "poetry-dynamic-versioning>=1.0.0,<2.0.0"]

 [project]
 authors = []
@@ -16,12 +13,12 @@ classifiers = [
   "Programming Language :: Python :: 3.11"
 ]
 description = "Data Cloud Custom Code SDK"
-dynamic = ["version"]
+dynamic = []
 license = "Apache-2.0"
 name = "salesforce-data-customcode"
 readme = "README.md"
 requires-python = ">=3.10,<3.12"
-version = "0.1.0"
+version = "0.1.2"

 [tool.black]
 exclude = '''
@@ -132,7 +129,7 @@ poetry-dynamic-versioning = {version = ">=1.0.0,<2.0.0", extras = ["plugin"]}
 datacustomcode = "datacustomcode.cli:cli"

 [tool.poetry-dynamic-versioning]
-enable = true
+enable = false
 pattern = "^v(?P<base>.+)$"
 style = "semver"
 vcs = "git"
src/datacustomcode/cli.py
@@ -101,13 +101,25 @@ def deploy(profile: str, path: str, name: str, version: str, description: str):
 @cli.command()
 @click.argument("directory", default=".")
 def init(directory: str):
+    from datacustomcode.scan import dc_config_json_from_file
     from datacustomcode.template import copy_template

     click.echo("Copying template to " + click.style(directory, fg="blue", bold=True))
     copy_template(directory)
+    entrypoint_path = os.path.join(directory, "payload", "entrypoint.py")
+    config_location = os.path.join(os.path.dirname(entrypoint_path), "config.json")
+    config_json = dc_config_json_from_file(entrypoint_path)
+    with open(config_location, "w") as f:
+        json.dump(config_json, f, indent=2)
+
     click.echo(
         "Start developing by updating the code in "
-        + click.style(f"{directory}/payload/entrypoint.py", fg="blue", bold=True)
+        + click.style(entrypoint_path, fg="blue", bold=True)
+    )
+    click.echo(
+        "You can run "
+        + click.style(f"datacustomcode scan {entrypoint_path}", fg="blue", bold=True)
+        + " to automatically update config.json when you make changes to your code"
     )

src/datacustomcode/deploy.py
@@ -31,11 +31,11 @@ from typing import (
 )

 from loguru import logger
+import pydantic
 from pydantic import BaseModel
 import requests

 from datacustomcode.cmd import cmd_output
-from datacustomcode.scan import scan_file

 if TYPE_CHECKING:
     from datacustomcode.credentials import Credentials
@@ -78,8 +78,10 @@ def _make_api_call(
     logger.debug(f"Request params: {kwargs}")

     response = requests.request(method=method, url=url, headers=headers, **kwargs)
-    response.raise_for_status()
     json_response = response.json()
+    if response.status_code >= 400:
+        logger.debug(f"Error Response: {json_response}")
+        response.raise_for_status()
     assert isinstance(
         json_response, dict
     ), f"Unexpected response type: {type(json_response)}"
@@ -224,51 +226,17 @@ def wait_for_deployment(
         callback(status)
         if status == "Deployed":
             logger.debug(
-                "Deployment completed, Elapsed time: {time.time() - start_time}"
+                f"Deployment completed.\nElapsed time: {time.time() - start_time}"
             )
             break
         time.sleep(1)


 DATA_TRANSFORM_REQUEST_TEMPLATE: dict[str, Any] = {
-    "metadata": {
-        "dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v8.json",
-        "dbt_version": "1.4.6",
-        "generated_at": "2023-04-25T18:54:11.375589Z",
-        "invocation_id": "d6c68c69-533a-4d54-861e-1493d6cd8092",
-        "env": {},
-        "project_id": "jaffle_shop",
-        "user_id": "1ca8403c-a1a5-43af-8b88-9265e948b9d2",
-        "send_anonymous_usage_stats": True,
-        "adapter_type": "spark",
-    },
-    "nodes": {
-        "model.dcexample.dim_listings_w_hosts": {
-            "name": "dim_listings_w_hosts",
-            "resource_type": "model",
-            "relation_name": "{OUTPUT_DLO}",
-            "config": {"materialized": "table"},
-            "compiled_code": "",
-            "depends_on": {"nodes": []},
-        }
-    },
-    "sources": {
-        "source.dcexample.listings": {
-            "name": "listings",
-            "resource_type": "source",
-            "relation_name": "{INPUT_DLO}",
-            "identifier": "{INPUT_DLO}",
-        }
-    },
+    "nodes": {},
+    "sources": {},
     "macros": {
-        "macro.dcexample.byoc": {
-            "name": "byoc_example",
-            "resource_type": "macro",
-            "path": "",
-            "original_file_path": "",
-            "unique_id": "unique id",
-            "macro_sql": "",
-            "supported_languages": None,
+        "macro.byoc": {
             "arguments": [{"name": "{SCRIPT_NAME}", "type": "BYOC_SCRIPT"}],
         }
     },
@@ -276,36 +244,44 @@ DATA_TRANSFORM_REQUEST_TEMPLATE: dict[str, Any] = {


 class DataTransformConfig(BaseModel):
-    input: Union[str, list[str]]
-    output: Union[str, list[str]]
+    sdkVersion: str
+    entryPoint: str
+    dataspace: str
+    permissions: Permissions


-DATA_TRANSFORM_CONFIG_TEMPLATE: dict[str, Any] = {
-    "entryPoint": "entrypoint.py",
-    "dataspace": "default",
-    "permissions": {"read": {"dlo": ""}, "write": {"dlo": ""}},
-}
+class Permissions(BaseModel):
+    read: Union[DloPermission]
+    write: Union[DloPermission]


-def get_data_transform_config(directory: str) -> DataTransformConfig:
-    """Get the data transform config from the entrypoint.py file."""
-    entrypoint_file = os.path.join(directory, "entrypoint.py")
-    data_access_layer_calls = scan_file(entrypoint_file)
-    input_ = data_access_layer_calls.input_str
-    output = data_access_layer_calls.output_str
-    return DataTransformConfig(input=input_, output=output)
+class DloPermission(BaseModel):
+    dlo: list[str]


-def create_data_transform_config(directory: str) -> None:
-    """Create a data transform config.json file in the directory."""
-    data_transform_config = get_data_transform_config(directory)
-    request_hydrated = DATA_TRANSFORM_CONFIG_TEMPLATE.copy()
-    request_hydrated["permissions"]["read"]["dlo"] = data_transform_config.input
-    request_hydrated["permissions"]["write"]["dlo"] = data_transform_config.output
-    logger.debug(f"Creating data transform config in {directory}")
-    json.dump(
-        request_hydrated, open(os.path.join(directory, "config.json"), "w"), indent=4
-    )
+def get_data_transform_config(directory: str) -> DataTransformConfig:
+    """Get the data transform config from the config.json file."""
+    config_path = os.path.join(directory, "config.json")
+    try:
+        with open(config_path, "r") as f:
+            config = json.loads(f.read())
+        return DataTransformConfig(**config)
+    except FileNotFoundError as err:
+        raise FileNotFoundError(f"config.json not found at {config_path}") from err
+    except json.JSONDecodeError as err:
+        raise ValueError(f"config.json at {config_path} is not valid JSON") from err
+    except pydantic.ValidationError as err:
+        missing_fields = [str(err["loc"][0]) for err in err.errors()]
+        raise ValueError(
+            f"config.json at {config_path} is missing required "
+            f"fields: {', '.join(missing_fields)}"
+        ) from err
+
+
+def verify_data_transform_config(directory: str) -> None:
+    """Verify the data transform config.json contents."""
+    get_data_transform_config(directory)
+    logger.debug(f"Verified data transform config in {directory}")


 def create_data_transform(
@@ -317,28 +293,31 @@ def create_data_transform(
     script_name = metadata.name
     data_transform_config = get_data_transform_config(directory)
     request_hydrated = DATA_TRANSFORM_REQUEST_TEMPLATE.copy()
-    request_hydrated["nodes"]["model.dcexample.dim_listings_w_hosts"][
-        "relation_name"
-    ] = data_transform_config.input
-    request_hydrated["sources"]["source.dcexample.listings"][
-        "relation_name"
-    ] = data_transform_config.output
-    request_hydrated["sources"]["source.dcexample.listings"][
-        "identifier"
-    ] = data_transform_config.output
-    request_hydrated["macros"]["macro.dcexample.byoc"]["arguments"][0][
-        "name"
-    ] = script_name
+
+    # Add nodes for each write DLO
+    for i, dlo in enumerate(data_transform_config.permissions.write.dlo, 1):
+        request_hydrated["nodes"][f"node{i}"] = {
+            "relation_name": dlo,
+            "config": {"materialized": "table"},
+            "compiled_code": "",
+        }
+
+    # Add sources for each read DLO
+    for i, dlo in enumerate(data_transform_config.permissions.read.dlo, 1):
+        request_hydrated["sources"][f"source{i}"] = {"relation_name": dlo}
+
+    request_hydrated["macros"]["macro.byoc"]["arguments"][0]["name"] = script_name

     body = {
         "definition": {
-            "type": "DBT",
+            "type": "DCSQL",
             "manifest": request_hydrated,
             "version": "56.0",
         },
         "label": f"{metadata.name}",
         "name": f"{metadata.name}",
         "type": "BATCH",
+        "dataSpaceName": data_transform_config.dataspace,
     }

     url = _join_strip_url(access_token.instance_url, DATA_TRANSFORMS_PATH)
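For reference, a minimal sketch of what the rewritten hydration above produces, assuming a config with one read DLO and one write DLO (the DLO names come from the bundled 0.1.2 template; the script name "my_transform" is a placeholder):

# Sketch only: DATA_TRANSFORM_REQUEST_TEMPLATE after hydration for a
# one-input, one-output transform.
hydrated = {
    "nodes": {
        "node1": {
            "relation_name": "Account_Home_copy__dll",
            "config": {"materialized": "table"},
            "compiled_code": "",
        },
    },
    "sources": {"source1": {"relation_name": "Account_Home__dll"}},
    "macros": {
        "macro.byoc": {"arguments": [{"name": "my_transform", "type": "BYOC_SCRIPT"}]}
    },
}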
@@ -357,7 +336,7 @@ def deploy_full(

     # prepare payload
     prepare_dependency_archive(directory)
-    create_data_transform_config(directory)
+    verify_data_transform_config(directory)

     # create deployment and upload payload
     deployment = create_deployment(access_token, metadata)
src/datacustomcode/scan.py
@@ -16,15 +16,27 @@ from __future__ import annotations

 import ast
 from typing import (
+    Any,
     Dict,
-    List,
     Union,
 )

 import pydantic

+from datacustomcode.version import get_version
+
 DATA_ACCESS_METHODS = ["read_dlo", "read_dmo", "write_to_dlo", "write_to_dmo"]

+DATA_TRANSFORM_CONFIG_TEMPLATE = {
+    "sdkVersion": get_version(),
+    "entryPoint": "",
+    "dataspace": "default",
+    "permissions": {
+        "read": {},
+        "write": {},
+    },
+}
+

 class DataAccessLayerCalls(pydantic.BaseModel):
     read_dlo: frozenset[str]
@@ -129,25 +141,22 @@ def scan_file(file_path: str) -> DataAccessLayerCalls:
     return visitor.found()


-def dc_config_json_from_file(file_path: str) -> dict:
+def dc_config_json_from_file(file_path: str) -> dict[str, Any]:
     """Create a Data Cloud Custom Code config JSON from a script."""
     output = scan_file(file_path)
-    read = {}
+    config = DATA_TRANSFORM_CONFIG_TEMPLATE.copy()
+    config["entryPoint"] = file_path.rpartition("/")[-1]
+
+    read: dict[str, list[str]] = {}
     if output.read_dlo:
         read["dlo"] = list(output.read_dlo)
     else:
         read["dmo"] = list(output.read_dmo)
-    write = {}
+    write: dict[str, list[str]] = {}
     if output.write_to_dlo:
         write["dlo"] = list(output.write_to_dlo)
     else:
         write["dmo"] = list(output.write_to_dmo)
-    config: Dict[str, Union[str, Dict[str, Dict[str, List[str]]]]] = {
-        "entryPoint": file_path.rpartition("/")[-1],
-        "dataspace": "default",
-        "permissions": {
-            "read": read,
-            "write": write,
-        },
-    }
+
+    config["permissions"] = {"read": read, "write": write}
     return config
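Together with the cli.py change above, this is the function `datacustomcode init` now uses to generate config.json. A minimal usage sketch, assuming the 0.1.2 template entrypoint (the path is illustrative, and the exact sdkVersion value depends on the installed package):

from datacustomcode.scan import dc_config_json_from_file

# Scan the template entrypoint shipped with 0.1.2.
config = dc_config_json_from_file("payload/entrypoint.py")
# For the bundled template, which reads Account_Home__dll and writes
# Account_Home_copy__dll, the result has this shape:
# {
#     "sdkVersion": "0.1.2",
#     "entryPoint": "entrypoint.py",
#     "dataspace": "default",
#     "permissions": {
#         "read": {"dlo": ["Account_Home__dll"]},
#         "write": {"dlo": ["Account_Home_copy__dll"]},
#     },
# }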
src/datacustomcode/templates/Dockerfile
@@ -6,11 +6,11 @@ ENV ENVIRONMENT=Outside_DataCloud

 # install from dev requirements.txt
 COPY requirements-dev.txt ./requirements-dev.txt
-RUN pip3 install --no-cache-dir -r requirements-dev.txt
+RUN pip3.11 install --no-cache-dir -r requirements-dev.txt

 # Install from requirements.txt:
 COPY requirements.txt ./requirements.txt
-RUN pip3 install --no-cache-dir -r requirements.txt
+RUN pip3.11 install --no-cache-dir -r requirements.txt

 # Create workspace directory
 RUN mkdir /workspace
salesforce_data_customcode-0.1.2/src/datacustomcode/templates/payload/entrypoint.py
@@ -0,0 +1,25 @@
+from pyspark.sql.functions import col, upper
+
+from datacustomcode.client import Client
+from datacustomcode.io.writer.base import WriteMode
+
+
+def main():
+    client = Client()
+
+    df = client.read_dlo("Account_Home__dll")
+
+    # Perform transformations on the DataFrame
+    df_upper1 = df.withColumn("Description__c", upper(col("Description__c")))
+
+    # Drop specific columns related to relationships
+    df_upper1 = df_upper1.drop("KQ_ParentId__c")
+    df_upper1 = df_upper1.drop("KQ_Id__c")
+
+    # Save the transformed DataFrame
+    dlo_name = "Account_Home_copy__dll"
+    client.write_to_dlo(dlo_name, df_upper1, write_mode=WriteMode.APPEND)
+
+
+if __name__ == "__main__":
+    main()
salesforce_data_customcode-0.1.2/src/datacustomcode/version.py
@@ -0,0 +1,27 @@
+# Copyright (c) 2025, Salesforce, Inc.
+# SPDX-License-Identifier: Apache-2
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Version information for the Data Cloud Custom Code SDK."""
+
+import importlib.metadata
+
+
+def get_version() -> str:
+    """Get the current version of the SDK.
+
+    Returns:
+        str: The version string from package metadata.
+    """
+    # First try to get version from installed package metadata
+    return importlib.metadata.version("salesforce-data-customcode")
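Usage is a one-liner; a minimal sketch (note that importlib.metadata.version raises PackageNotFoundError if the package is not installed):

from datacustomcode.version import get_version

print(get_version())  # e.g. "0.1.2" when the 0.1.2 sdist/wheel is installed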
salesforce_data_customcode-0.1.0/src/datacustomcode/templates/payload/entrypoint.py
@@ -1,10 +0,0 @@
-from datacustomcode.client import Client
-
-
-def main():
-    client = Client()  # noqa: F841
-    # TODO: Add your custom code here
-
-
-if __name__ == "__main__":
-    main()