primitive 0.2.34__py3-none-any.whl → 0.2.36__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
primitive/__about__.py CHANGED
@@ -1,4 +1,4 @@
  # SPDX-FileCopyrightText: 2024-present Dylan Stein <dylan@primitive.tech>
  #
  # SPDX-License-Identifier: MIT
- __version__ = "0.2.34"
+ __version__ = "0.2.36"
primitive/agent/actions.py CHANGED
@@ -102,4 +102,4 @@ class Agent(BaseAction):
 
  sleep(5)
  except KeyboardInterrupt:
- logger.info("[agent] Stopping primitive agent...")
+ logger.info("Stopping primitive agent...")
primitive/agent/runner.py CHANGED
@@ -3,11 +3,12 @@ import os
  import re
  import shutil
  import typing
- from abc import abstractmethod
  from enum import Enum, IntEnum
  from pathlib import Path, PurePath
  from typing import Dict, List, TypedDict
 
+ from ..utils.logging import log_context, fmt
+
  import yaml
  from loguru import logger
 
@@ -57,16 +58,6 @@ class LogLevel(Enum):
  WARNING = "WARNING"
 
 
- # Log Counter
- class LogCounter:
- count = 0
-
- @classmethod
- def next(cls) -> int:
- cls.count += 1
- return cls.count
-
-
  class Runner:
  def __init__(
  self,
@@ -79,7 +70,7 @@ class Runner:
  self.job_run = job_run
  self.job_settings = job_run["jobSettings"]
  self.config = None
- self.source_dir: Path = None
+ self.source_dir: Path | None = None
  self.initial_env = {}
  self.modified_env = {}
  self.file_logger = None
@@ -91,11 +82,11 @@ class Runner:
  self.file_logger = logger.add(
  Path(get_logs_cache(self.job_run["id"]) / log_name),
  rotation=max_log_size,
- format=Runner.fmt,
+ format=fmt,
  backtrace=True,
- diagnose=True,
  )
 
+ @log_context(label="setup")
  def setup(self) -> None:
  # Attempt to download the job source code
  git_repo_full_name = self.job_run["gitCommit"]["repoFullName"]
@@ -155,66 +146,62 @@ class Runner:
  self.job_run["gitCommit"]["repoFullName"]
  )
 
+ @log_context(label="execute")
  def execute(self) -> None:
  logger.info(f"Executing {self.job['slug']} job")
  self.primitive.jobs.job_run_update(
  self.job_run["id"], status="request_in_progress"
  )
- self.modified_env = {**self.initial_env}
 
+ self.modified_env = {**self.initial_env}
  task_failed = False
  cancelled = False
 
  for task in self.config["executes"]:
- # the get status check here is to ensure that if cancel is called
- # while one task is running, we do not run any OTHER laebeled tasks
- # THIS is required for MULTI STEP JOBS
+ # Everything inside this loop should be contextualized with the task label
+ # this way we aren't jumping back and forth between the task label and "execute"
+ with logger.contextualize(label=task["label"]):
+ # the get status check here is to ensure that if cancel is called
+ # while one task is running, we do not run any OTHER labeled tasks
+ # THIS is required for MULTI STEP JOBS
+ status = self.primitive.jobs.get_job_status(self.job_run["id"])
+ status_value = status.data["jobRun"]["status"]
+ conclusion_value = status.data["jobRun"]["conclusion"]
+
+ if status_value == "completed" and conclusion_value == "cancelled":
+ cancelled = True
+ break
+
+ # Everything within this block should be contextualized as user logs
+ with logger.contextualize(type="user"):
+ with asyncio.Runner() as async_runner:
+ if task_failed := async_runner.run(self.run_task(task)):
+ break
+
+ # FOR NONE MULTI STEP JOBS
+ # we still have to check that the job was cancelled here as well
+ with logger.contextualize(label="conclusion"):
  status = self.primitive.jobs.get_job_status(self.job_run["id"])
  status_value = status.data["jobRun"]["status"]
  conclusion_value = status.data["jobRun"]["conclusion"]
-
  if status_value == "completed" and conclusion_value == "cancelled":
  cancelled = True
- break
 
- with logger.contextualize(label=task["label"]):
- with asyncio.Runner() as async_runner:
- if task_failed := async_runner.run(self.run_task(task)):
- break
+ if cancelled:
+ logger.warning("Job cancelled by user")
+ return
 
- number_of_files_produced = self.get_number_of_files_produced()
- logger.info(
- f"Produced {number_of_files_produced} files for {self.job['slug']} job"
- )
+ conclusion = "success"
+ if task_failed:
+ conclusion = "failure"
+ else:
+ logger.success(f"Completed {self.job['slug']} job")
 
- # FOR NONE MULTI STEP JOBS
- # we still have to check that the job was cancelled here as well
- status = self.primitive.jobs.get_job_status(self.job_run["id"])
- status_value = status.data["jobRun"]["status"]
- conclusion_value = status.data["jobRun"]["conclusion"]
- if status_value == "completed" and conclusion_value == "cancelled":
- cancelled = True
-
- if cancelled:
- logger.warning("Job cancelled by user")
  self.primitive.jobs.job_run_update(
  self.job_run["id"],
- number_of_files_produced=number_of_files_produced,
+ status="request_completed",
+ conclusion=conclusion,
  )
- return
-
- conclusion = "success"
- if task_failed:
- conclusion = "failure"
- else:
- logger.success(f"Completed {self.job['slug']} job")
-
- self.primitive.jobs.job_run_update(
- self.job_run["id"],
- status="request_completed",
- conclusion=conclusion,
- number_of_files_produced=number_of_files_produced,
- )
 
  def get_number_of_files_produced(self) -> int:
  """Returns the number of files produced by the job."""
@@ -261,7 +248,9 @@ class Runner:
  return number_of_files_produced
 
  async def run_task(self, task: Task) -> bool:
- for cmd in task["cmd"].strip().split("\n"):
+ logger.info(f"Running step '{task['label']}'")
+ commands = task["cmd"].strip().split("\n")
+ for i, cmd in enumerate(commands):
  # Adding an additional echo and utilizing stdbuf to force line buffering
  # This ensures that the environment variables and starting delimiter are
  # always in a new chunk, vastly simplifying our parsing logic
@@ -271,6 +260,8 @@ class Runner:
  f"{cmd} && echo -n '{ENV_VAR_LOOKUP_START}' && env && echo -n '{ENV_VAR_LOOKUP_END}'",
  ]
 
+ logger.info(f"Executing command {i + 1}/{len(commands)}: {cmd}")
+
  process = await asyncio.create_subprocess_exec(
  *args,
  env=self.modified_env,
@@ -294,6 +285,10 @@
 
  returncode = await process.wait()
 
+ logger.info(
+ f"Finished executing command {i + 1}/{len(commands)}: {cmd} with return code {returncode}"
+ )
+
  JobRun.objects.filter_by(job_run_id=self.job_run["id"]).update(
  {"pid": None}
  )
@@ -307,6 +302,7 @@ class Runner:
  logger.error(f"Task {task['label']} failed on '{cmd}'")
  return True
 
+ logger.success(f"Completed {task['label']} task")
  return False
 
  async def log_cmd(self, process, stream, tags: Dict = {}) -> bool:
@@ -401,9 +397,8 @@
 
  return [line for line in lines if len(line) > 0]
 
+ @log_context(label="cleanup")
  def cleanup(self) -> None:
- logger.remove(self.file_logger)
-
  if "stores" not in self.config:
  return
 
@@ -419,23 +414,13 @@
 
  shutil.rmtree(path=self.source_dir)
 
- @abstractmethod
- def fmt(record) -> str:
- extra = record["extra"]
- # Delimiters with empty space MUST exist for LogQL pattern matching
- label = extra.get("label", None)
- tag = extra.get("tag", None)
- context = f"{label} | " if label else " | "
- context += f"{tag} | " if tag else " | "
-
- log = (
- f"{LogCounter.next()} | "
- "<green>{time:YYYY-MM-DD HH:mm:ss.SSS!UTC}</green> | "
- "<green>{time:YYYY-MM-DD HH:mm:ss.SSS}</green> | "
- "<level>{level}</level> | "
- f"{context}"
- "<cyan>{name}</cyan>:<cyan>{function}</cyan>:<cyan>{line}</cyan> - "
- "<level>{message}</level>\n"
+ number_of_files_produced = self.get_number_of_files_produced()
+ logger.info(
+ f"Produced {number_of_files_produced} files for {self.job['slug']} job"
+ )
+ self.primitive.jobs.job_run_update(
+ self.job_run["id"],
+ number_of_files_produced=number_of_files_produced,
  )
 
- return log
+ logger.remove(self.file_logger)
primitive/utils/logging.py ADDED
@@ -0,0 +1,44 @@
+ from functools import wraps
+ from loguru import logger
+ from datetime import timezone
+ import json
+
+
+ def log_context(**context):
+ def decorator(func):
+ @wraps(func)
+ def wrapper(*args, **kwargs):
+ with logger.contextualize(**context):
+ return func(*args, **kwargs)
+
+ return wrapper
+
+ return decorator
+
+
+ def fmt(record) -> str:
+ extra = record["extra"]
+ label = extra["label"]
+ tag = extra.get("tag", None)
+ type = extra.get("type", "system")
+
+ context_object = {
+ "label": label,
+ "type": type,
+ "utc": record["time"]
+ .astimezone(timezone.utc)
+ .strftime("%Y-%m-%d %H:%M:%S.%f")[:-3],
+ "level": record["level"].name,
+ "name": record["name"],
+ "function": record["function"],
+ "line": record["line"],
+ "message": record["message"],
+ }
+
+ if tag:
+ context_object["tag"] = tag
+
+ # Loguru will fail if you return a string that doesn't select
+ # something within its record
+ record["extra"]["serialized"] = json.dumps(context_object)
+ return "{extra[serialized]}\n"
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: primitive
- Version: 0.2.34
+ Version: 0.2.36
  Project-URL: Documentation, https://github.com//primitivecorp/primitive-cli#readme
  Project-URL: Issues, https://github.com//primitivecorp/primitive-cli/issues
  Project-URL: Source, https://github.com//primitivecorp/primitive-cli
@@ -1,11 +1,11 @@
- primitive/__about__.py,sha256=s58wIznYy_MiaxrfZEt9pFQ41lrUgI4Bc_OF1gkNEwg,130
+ primitive/__about__.py,sha256=5-Cyrivu3AqsJWwi4il_NEU7wvPqzCfGtM5hHx7QXoM,130
  primitive/__init__.py,sha256=bwKdgggKNVssJFVPfKSxqFMz4IxSr54WWbmiZqTMPNI,106
  primitive/cli.py,sha256=g7EtHI9MATAB0qQu5w-WzbXtxz_8zu8z5E7sETmMkKU,2509
  primitive/client.py,sha256=h8WZVnQylVe0vbpuyC8YZHl2JyITSPC-1HbUcmrE5pc,3623
  primitive/agent/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- primitive/agent/actions.py,sha256=JLnHCtu0AgeHRZccCc2I39mziLixmj9EKtKeYmvBE7A,3700
+ primitive/agent/actions.py,sha256=VHlNTw2M-T1MRajpIBu2weT8AsdDdYtlgbBnRBYYeco,3692
  primitive/agent/commands.py,sha256=cK7d3OcN5Z65gQWVZFQ-Y9ddw9Pes4f9OVBpeMsj5sE,255
- primitive/agent/runner.py,sha256=Vuz0bJjfq3IytLI6HxC9Gfd1TdARjvNsG273GEWxjKI,15462
+ primitive/agent/runner.py,sha256=UMLCF0BhyBRJLGntB1C5dDCmIVYjE7AiI2RqpupJRd0,15464
  primitive/agent/uploader.py,sha256=ZzrzsajNBogwEC7mT6Ejy0h2Jd9axMYGzt9pbCvVMlk,3171
  primitive/auth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  primitive/auth/actions.py,sha256=9NIEXJ1BNJutJs6AMMSjMN_ziONUAUhY_xHwojYJCLA,942
@@ -92,12 +92,13 @@ primitive/utils/chunk_size.py,sha256=PAuVuirUTA9oRXyjo1c6MWxo31WVBRkWMuWw-AS58Bw
  primitive/utils/config.py,sha256=DlFM5Nglo22WPtbpZSVtH7NX-PTMaKYlcrUE7GPRG4c,1058
  primitive/utils/daemons.py,sha256=mSoSHitiGfS4KYAEK9sKsiv_YcACHKgY3qISnDpUUIE,1086
  primitive/utils/exceptions.py,sha256=DrYHTcCAJGC7cCUwOx_FmdlVLWRdpzvDvpLb82heppE,311
+ primitive/utils/logging.py,sha256=W-MY6B---nG7A5lg17t5ZWTGytO0Y8_KPUvFCs5MBcs,1121
  primitive/utils/memory_size.py,sha256=4xfha21kW82nFvOTtDFx9Jk2ZQoEhkfXii-PGNTpIUk,3058
  primitive/utils/printer.py,sha256=f1XUpqi5dkTL3GWvYRUGlSwtj2IxU1q745T4Fxo7Tn4,370
  primitive/utils/shell.py,sha256=Z4zxmOaSyGCrS0D6I436iQci-ewHLt4UxVg1CD9Serc,2171
  primitive/utils/text.py,sha256=XiESMnlhjQ534xE2hMNf08WehE1SKaYFRNih0MmnK0k,829
- primitive-0.2.34.dist-info/METADATA,sha256=tXBOqx2I7GoxBe13HXcWi0qSby1oIwlX4_hdwNicsYQ,3569
- primitive-0.2.34.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- primitive-0.2.34.dist-info/entry_points.txt,sha256=p1K8DMCWka5FqLlqP1sPek5Uovy9jq8u51gUsP-z334,48
- primitive-0.2.34.dist-info/licenses/LICENSE.txt,sha256=B8kmQMJ2sxYygjCLBk770uacaMci4mPSoJJ8WoDBY_c,1098
- primitive-0.2.34.dist-info/RECORD,,
+ primitive-0.2.36.dist-info/METADATA,sha256=fSyVyuB8t19XnCtCHqh5gcP1QPhBdl_MSWvRC0ybcFo,3569
+ primitive-0.2.36.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ primitive-0.2.36.dist-info/entry_points.txt,sha256=p1K8DMCWka5FqLlqP1sPek5Uovy9jq8u51gUsP-z334,48
+ primitive-0.2.36.dist-info/licenses/LICENSE.txt,sha256=B8kmQMJ2sxYygjCLBk770uacaMci4mPSoJJ8WoDBY_c,1098
+ primitive-0.2.36.dist-info/RECORD,,