lam-cli 0.0.5__tar.gz → 0.0.7__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: lam-cli
- Version: 0.0.5
+ Version: 0.0.7
  Summary: Laminar data transformation tool
  Home-page: https://github.com/laminar-run/lam
  Author: Laminar Run, Inc.
@@ -9,6 +9,7 @@ License: GPLv3
  License-File: LICENSE
  Requires-Dist: click
  Requires-Dist: posthog
+ Requires-Dist: logtail-python


  Laminar is a platform that makes building and maintaining API integrations faster.
@@ -72,4 +72,40 @@ Make sure to update the `requirements.txt` file when adding new dependencies.
  ```bash
  pip3 install <package>
  pip3 freeze > requirements.txt
- ```
+ ```
+
+ ## Releases
+
+ The PyPI CI handles most of the release process. Occasionally the push to Test PyPI will cause the CI run to "fail"; when that happens, just confirm that the published version on the main index is correct: [https://pypi.org/project/lam-cli/](https://pypi.org/project/lam-cli/).
+
+ Before releasing, make sure to update the package version in `setup.py`.
+
+ ```python
+ setup(
+     name="lam-cli",
+     version="0.0.<x>",
+     ...
+ )
+ ```
+
+ Don't add the `-<increment>` suffix to the version number in `setup.py`; it is added when creating the tag.
+
+ You can release with:
+
+ ```bash
+ git tag v<version>-<increment>
+
+ # Example
+ git tag v0.0.1-1
+ ```
+
+ Then push the tag:
+
+ ```bash
+ git push origin v<version>-<increment>
+
+ # Example
+ git push origin v0.0.1-1
+ ```
+
+ The CI will handle the rest!
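Putting the README steps above together, a release for the version bump in this diff might look roughly like the following (a sketch; the `-1` increment is illustrative, matching the README's example):

```bash
# 1. Bump the version in setup.py (no -<increment> suffix here)
#        version='0.0.7',

# 2. Tag the release with the increment suffix and push the tag
git tag v0.0.7-1
git push origin v0.0.7-1

# 3. The PyPI CI picks up the tag and publishes to
#    https://pypi.org/project/lam-cli/
```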
@@ -9,52 +9,82 @@ import subprocess
  from datetime import datetime

  import click
+ from logtail import LogtailHandler
+ from posthog import Posthog
+
+ posthog = Posthog(project_api_key='phc_wfeHFG0p5yZIdBpjVYy00o5x1HbEpggdMzIuFYgNPSK', host='https://app.posthog.com')
+
+ logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
+ logger = logging.getLogger(__name__)
+
+ handler = LogtailHandler(source_token="TYz3WrrvC8ehYjXdAEGGyiDp")
+ logger.addHandler(handler)

  jq_path = 'jq'

+ def truncate_long_strings(data, max_length=1000, truncation_msg="... (truncated)"):
+     """
+     Truncate long strings in a JSON object if they exceed max_length.
+     Append a message to indicate truncation.
+     """
+     if isinstance(data, dict):
+         return {key: truncate_long_strings(value, max_length, truncation_msg) for key, value in data.items()}
+     elif isinstance(data, list):
+         return [truncate_long_strings(item, max_length, truncation_msg) for item in data]
+     elif isinstance(data, str):
+         return data[:max_length] + truncation_msg if len(data) > max_length else data
+     return data
+
  def generate_distinct_id(workspace_id, flow_id):
      user_id = os.getuid()
      hostname = socket.gethostname()
      return f"{user_id}_{hostname}_{workspace_id}_{flow_id}"

  def track_event(event_name, properties, workspace_id="local", flow_id="local"):
-     logging.info(f"Event {event_name} triggered, with properties: {properties}")
+     logger.info(f"Event {event_name} triggered, with properties: {properties}")

- def parse_program(program):
-     logging.info(f"Parsing program: {program}")
+     try:
+         distinct_id = generate_distinct_id(workspace_id, flow_id)
+         posthog.capture(distinct_id=distinct_id, event=event_name, properties=properties)
+     except Exception as e:
+         logger.error(f"Error logging event: {e}")

-     split_by_lines = program.split('\n')
-     return ''.join(line for line in split_by_lines if line and not line.strip().startswith('#'))
+ def parse_program_file(program_file):
+     logger.info(f"Parsing program file: {program_file}")
+     with open(program_file, 'r') as file:
+         return ''.join(line for line in file if not line.strip().startswith('#'))

  def run_jq(jq_script, input_data):
-     logging.info(f"Running jq script {jq_script} with input data {input_data}")
+     logger.info(f"Running jq script {jq_script} with input data {truncate_long_strings(input_data)}")
      process = subprocess.Popen([jq_path, '-c', jq_script], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
      output, error = process.communicate(input=input_data)
      if error:
-         logging.error(f"Error running jq: {error}")
+         logger.error(f"Error running jq: {error}")
      return output, error

- def process_input(input_json, workspace_id, flow_id):
-     logging.info(f"Processing input_json: {input_json}")
+ def process_input(input, workspace_id, flow_id):
+     logger.info(f"Processing input: {truncate_long_strings(input)}")
+     if os.path.isfile(input):
+         with open(input, 'r') as file:
+             return file.read(), None
      try:
-         json.loads(input_json)
-         return input_json, None
+         json.loads(input)
+         return input, None
      except json.JSONDecodeError as e:
-         logging.error(f"Invalid JSON input: {e}")
+         logger.error(f"Invalid JSON input: {e}")
          track_event('lam.run.error', {'error': f"Invalid JSON input: {e}", 'workspace_id': workspace_id, 'flow_id': flow_id}, workspace_id, flow_id)
          return None, str(e)

  def handle_jq_output(output, as_json, workspace_id, flow_id):
-     logging.info(f"Handling jq output: {output}")
+     logger.info(f"Handling jq output: {truncate_long_strings(output)}")
      try:
          json_output = json.loads(output)
-         # Make sure the output has a top-level object
          if not isinstance(json_output, dict):
              track_event('lam.run.warn', {'error': 'Invalid JSON output', 'workspace_id': workspace_id, 'flow_id': flow_id}, workspace_id, flow_id)
              return {"lam.result": json_output} if as_json else output, None
          return json_output if as_json else output, None
      except json.JSONDecodeError as e:
-         logging.error("Failed to parse JSON output, may be multiple JSON objects. Attempting to parse as JSON lines.")
+         logger.error("Failed to parse JSON output, may be multiple JSON objects. Attempting to parse as JSON lines.")
          track_event('lam.run.warn', {'error': f"Invalid JSON output: {e}", 'workspace_id': workspace_id, 'flow_id': flow_id}, workspace_id, flow_id)
          if as_json:
              json_objects = [json.loads(line) for line in output.strip().split('\n') if line]
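For context on the helper introduced in the hunk above: `truncate_long_strings` recurses through dicts and lists and clips any string longer than `max_length`. A minimal illustration of its behavior (the sample payload below is invented, and the helper from the hunk above is assumed to be in scope):

```python
# Assumes truncate_long_strings as defined in the diff hunk above.
sample = {
    "id": 42,
    "notes": ["ok", "x" * 1500],           # second entry exceeds max_length
    "payload": {"body": "y" * 2000},
}

truncated = truncate_long_strings(sample, max_length=1000)

# Long strings are clipped to max_length and suffixed with "... (truncated)";
# numbers, short strings, and the container structure pass through unchanged.
assert truncated["id"] == 42
assert truncated["notes"][0] == "ok"
assert truncated["notes"][1].endswith("... (truncated)")
assert len(truncated["payload"]["body"]) == 1000 + len("... (truncated)")
```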
@@ -70,38 +100,39 @@ def lam():
      pass

  @lam.command()
- @click.argument('program', type=str)
- @click.argument('input_json', type=str)
+ @click.argument('program_file', type=click.Path(exists=True))
+ @click.argument('input', type=str)
  @click.option('--workspace_id', default="local", help="Workspace ID")
  @click.option('--flow_id', default="local", help="Flow ID")
+ @click.option('--execution_id', default="local", help="Execution ID")
  @click.option('--as-json', is_flag=True, default=True, help="Output as JSON")
- def run(program, input_json, workspace_id, flow_id, as_json):
+ def run(program_file, input, workspace_id, flow_id, execution_id, as_json):
      timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
-     log_file = f"lam_run_{workspace_id}_{flow_id}_{timestamp}.log"
-     result_file = f"lam_result_{workspace_id}_{flow_id}_{timestamp}.json"
+     log_file = f"lam_run_{workspace_id}_{flow_id}_{execution_id}_{timestamp}.log"
+     result_file = f"lam_result_{workspace_id}_{flow_id}_{execution_id}_{timestamp}.json"

-     # Now configure logging with the determined log file name
-     logging.basicConfig(level=logging.INFO, filename=log_file, filemode='w',
-                         format='%(asctime)s - %(levelname)s - %(message)s')
+     file_handler = logging.FileHandler(log_file, 'w')
+     file_handler.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))
+     logger.addHandler(file_handler)

-     logging.info(f"Logging to {log_file}")
-     logging.info(f"Running command with program: {program}, input_json: {input_json}, workspace_id: {workspace_id}, flow_id: {flow_id}, as_json: {as_json}")
+     logger.info(f"Logging to {log_file}")
+     logger.info(f"Running command with program file: {program_file}, input: {truncate_long_strings(input)}, workspace_id: {workspace_id}, flow_id: {flow_id}, as_json: {as_json}")
      if not shutil.which("jq"):
-         logging.error("Unable to find jq, killing process")
+         logger.error("Unable to find jq, killing process")
          click.echo({"lam.error": "jq is not installed"}, err=True)
          track_event('lam.run.error', {'error': 'jq is not installed', 'workspace_id': workspace_id, 'flow_id': flow_id}, workspace_id, flow_id)
          write_to_result_file({"lam.error": "jq is not installed"}, result_file)
          return

-     input_data, error = process_input(input_json, workspace_id, flow_id)
+     input_data, error = process_input(input, workspace_id, flow_id)
      if error:
          click.echo({"lam.error": f"Invalid input: {error}"}, err=True)
          track_event('lam.run.error', {'error': f"Invalid input: {error}", 'workspace_id': workspace_id, 'flow_id': flow_id}, workspace_id, flow_id)
          write_to_result_file({"lam.error": f"Invalid input: {error}"}, result_file)
          return

-     jq_script = parse_program(program)
-     track_event('lam.run.start', {'script': jq_script, 'as_json': as_json, 'workspace_id': workspace_id, 'flow_id': flow_id}, workspace_id, flow_id)
+     jq_script = parse_program_file(program_file)
+     track_event('lam.run.start', {'program_file': program_file, 'as_json': as_json, 'workspace_id': workspace_id, 'flow_id': flow_id}, workspace_id, flow_id)
      output, jq_error = run_jq(jq_script, input_data)

      if jq_error:
@@ -120,7 +151,8 @@ def run(program, input_json, workspace_id, flow_id, as_json):
      track_event('lam.run.success', {'workspace_id': workspace_id, 'flow_id': flow_id}, workspace_id, flow_id)
      write_to_result_file(result, result_file)

-     logging.info("Run complete, waiting for event logger to finish")
+     logger.info("Run complete, waiting for event logger to finish")
+     logger.removeHandler(file_handler)

  if __name__ == '__main__':
-     lam()
+     lam()
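Taken together, the `run` changes mean the first argument is now a path to a jq program file rather than an inline program string, the input may be either a file path or a raw JSON string, and the new `--execution_id` option is threaded into the per-run log and result file names. A hypothetical invocation, assuming the console script is installed as `lam` (the file names and IDs below are invented):

```bash
lam run transform.jq input.json \
  --workspace_id ws_123 \
  --flow_id flow_456 \
  --execution_id exec_789

# With the naming scheme in the diff, this run would write files such as:
#   lam_run_ws_123_flow_456_exec_789_<timestamp>.log
#   lam_result_ws_123_flow_456_exec_789_<timestamp>.json
```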
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: lam-cli
- Version: 0.0.5
+ Version: 0.0.7
  Summary: Laminar data transformation tool
  Home-page: https://github.com/laminar-run/lam
  Author: Laminar Run, Inc.
@@ -9,6 +9,7 @@ License: GPLv3
  License-File: LICENSE
  Requires-Dist: click
  Requires-Dist: posthog
+ Requires-Dist: logtail-python


  Laminar is a platform that makes building and maintaining API integrations faster.
@@ -0,0 +1,3 @@
+ click
+ posthog
+ logtail-python
@@ -2,11 +2,12 @@ from setuptools import find_packages, setup

  setup(
      name='lam-cli',
-     version='0.0.5',
+     version='0.0.7',
      packages=find_packages(),
      install_requires=[
          'click',
          'posthog',
+         'logtail-python',
      ],
      entry_points={
          'console_scripts': [
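Since `logtail-python` is now declared in `install_requires` (and in the requirements file added above), upgrading to the new release pulls it in automatically; for example:

```bash
pip3 install lam-cli==0.0.7
```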
@@ -1,2 +0,0 @@
- click
- posthog
File without changes
File without changes
File without changes