primitive 0.1.67__py3-none-any.whl → 0.1.69__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package as published to one of the supported registries. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
primitive/__about__.py CHANGED
@@ -1,4 +1,4 @@
1
1
  # SPDX-FileCopyrightText: 2024-present Dylan Stein <dylan@primitive.tech>
2
2
  #
3
3
  # SPDX-License-Identifier: MIT
4
- __version__ = "0.1.67"
4
+ __version__ = "0.1.69"
@@ -41,7 +41,14 @@ class Process:
41
41
  continue
42
42
 
43
43
  if key.fileobj is self.process.stdout:
44
- logger.info(data.rstrip())
44
+ raw_data = data.rstrip()
45
+ if "error" in raw_data.lower():
46
+ logger.error(raw_data)
47
+ self._errors += 1
48
+ elif "warning" in raw_data.lower():
49
+ logger.warning(raw_data)
50
+ else:
51
+ logger.info(raw_data)
45
52
  elif key.fileobj is self.process.stderr:
46
53
  logger.error(data.rstrip())
47
54
  self._errors += 1
primitive/agent/runner.py CHANGED
@@ -1,9 +1,10 @@
1
1
  import os
2
2
  import threading
3
3
  import typing
4
+ import json
4
5
  from pathlib import Path, PurePath
5
6
  from time import sleep
6
- from typing import Dict, Iterable, List, Optional, TypedDict
7
+ from typing import Dict, Iterable, List, Optional, TypedDict, Callable
7
8
 
8
9
  import yaml
9
10
  from loguru import logger
@@ -47,6 +48,7 @@ class AgentRunner:
47
48
  job_id: str,
48
49
  job_slug: str,
49
50
  max_log_size: int = 10 * 1024 * 1024,
51
+ log_to_file: bool = True,
50
52
  ) -> None:
51
53
  self.primitive = primitive
52
54
  self.source_dir = source_dir
@@ -54,16 +56,21 @@ class AgentRunner:
54
56
  self.job_id = job_id
55
57
  self.job_slug = job_slug
56
58
  self.max_log_size = max_log_size
57
- self.artifacts_dir = get_artifacts_cache(self.job_id)
58
- self.logs_dir = get_logs_cache(self.job_id)
59
- self.logger_handle = None
59
+ self.log_to_file = log_to_file
60
60
 
61
+ # Enable and configure logger
61
62
  logger.enable("primitive")
62
- self.swap_logs(label="init")
63
63
 
64
- logger.info(f"Scanning directory for job {self.job_slug}")
64
+ if self.log_to_file:
65
+ log_name = f"{self.job_slug}_{self.job_id}_{{time}}.primitive.log"
66
+ logger.add(
67
+ Path(get_logs_cache(self.job_id) / log_name),
68
+ rotation=self.max_log_size,
69
+ format=AgentRunner.log_serializer(),
70
+ )
65
71
 
66
- # Look for job based on slug
72
+ # Attempt to load job from file
73
+ logger.info(f"Scanning directory for job file {self.job_slug}")
67
74
  yaml_file = Path(self.source_dir / ".primitive" / f"{self.job_slug}.yaml")
68
75
  yml_file = Path(self.source_dir / ".primitive" / f"{self.job_slug}.yml")
69
76
 
@@ -71,7 +78,9 @@ class AgentRunner:
71
78
  logger.error(
72
79
  f"Found two job descriptions with the same slug: {self.job_slug}"
73
80
  )
74
- self.conclude(conclusion="failure")
81
+ self.primitive.jobs.job_run_update(
82
+ self.job_id, status="request_completed", conclusion="failure"
83
+ )
75
84
  raise FileExistsError
76
85
 
77
86
  if yaml_file.exists():
@@ -82,11 +91,35 @@ class AgentRunner:
82
91
  logger.error(
83
92
  f"No job description with matching slug '{self.job_slug}' found"
84
93
  )
85
- self.conclude(conclusion="failure")
94
+ self.primitive.jobs.job_run_update(
95
+ self.job_id, status="request_completed", conclusion="failure"
96
+ )
86
97
  raise FileNotFoundError
87
98
 
88
99
  logger.info(f"Found job description for {self.job_slug}")
89
100
 
101
+ @staticmethod
102
+ def log_serializer() -> Callable:
103
+ def fmt(record):
104
+ step = ""
105
+ if "step" in record["extra"]:
106
+ step = record["extra"]["step"]
107
+
108
+ log = {
109
+ "time": record["time"].strftime("%Y-%m-%d %H:%M:%S.%f"),
110
+ "utc": record["time"].strftime("%Y-%m-%d %H:%M:%S.%f%z"),
111
+ "level": record["level"].name,
112
+ "message": record["message"],
113
+ "name": record["name"],
114
+ "step": step,
115
+ }
116
+
117
+ record["extra"]["serialized"] = json.dumps(log)
118
+
119
+ return "{extra[serialized]}\n"
120
+
121
+ return fmt
122
+
90
123
  def name(self) -> str:
91
124
  return self.job["name"]
92
125
 
@@ -98,112 +131,85 @@ class AgentRunner:
98
131
  logger.info(f"Executing {self.job_slug} job")
99
132
  self.primitive.jobs.job_run_update(self.job_id, status="request_in_progress")
100
133
 
101
- # Initial environment is the system env
134
+ # Initialize the environment with the system
102
135
  environment = os.environ
103
136
  if "provision" in self.job:
104
137
  logger.info(f"Provisioning for {self.job['provision']} environment")
105
138
  environment = self.provision()
106
139
 
107
140
  if not environment:
108
- self.conclude("failure")
141
+ self.primitive.jobs.job_run_update(
142
+ self.job_id, status="request_completed", conclusion="failure"
143
+ )
144
+ logger.error(f"{self.job_slug} concluded with error(s)")
109
145
  return
110
146
 
111
- conclusion = None
112
- total_errors = 0
147
+ total_job_errors = 0
113
148
  for step in self.steps():
114
- # Swap logger
115
- self.swap_logs(label=step["name"])
116
-
117
149
  logger.info(f"Beginning step {step['name']}")
118
150
 
119
- # Update workdir
120
- if "workdir" in step:
121
- self.workdir = step["workdir"]
122
-
123
- # Define step proc
124
- proc = Process(
125
- cmd=step["cmd"],
126
- workdir=Path(self.source_dir / self.workdir),
127
- env=environment,
128
- )
129
-
130
- # Try to start
131
- try:
132
- proc.start()
133
- except Exception as e:
134
- logger.error(f"Error while attempting to run command {e}")
135
- conclusion = "failure"
136
- break
137
-
138
- def status_check():
139
- while proc.is_running():
140
- # Check job status
141
- status = self.primitive.jobs.get_job_status(self.job_id)
142
- status_value = status.data["jobRun"]["status"]
143
-
144
- # TODO: Should probably use request_cancelled or something
145
- # once we change it, we'll have to call conclude w/ cancelled status
146
- if status_value == "completed":
147
- logger.warning("Job cancelled by user")
148
- proc.terminate()
149
- return
151
+ with logger.contextualize(step=step["name"]):
152
+ if "workdir" in step:
153
+ self.workdir = step["workdir"]
154
+
155
+ proc = Process(
156
+ cmd=step["cmd"],
157
+ workdir=Path(self.source_dir / self.workdir),
158
+ env=environment,
159
+ )
160
+
161
+ try:
162
+ proc.start()
163
+ except Exception as e:
164
+ logger.error(f"Error while attempting to run process {e}")
165
+ self.primitive.jobs.job_run_update(
166
+ self.job_id, status="request_completed", conclusion="failure"
167
+ )
168
+ logger.error(f"{self.job_slug} concluded with error(s)")
169
+ return
170
+
171
+ def status_check():
172
+ while proc.is_running():
173
+ # Check job status
174
+ status = self.primitive.jobs.get_job_status(self.job_id)
175
+ status_value = status.data["jobRun"]["status"]
176
+
177
+ # TODO: Should probably use request_cancelled or something
178
+ # once we change it, we'll have to call conclude w/ cancelled status
179
+ if status_value == "completed":
180
+ logger.warning("Job cancelled by user")
181
+ proc.terminate()
182
+ return
183
+
184
+ sleep(5)
185
+
186
+ status_thread = threading.Thread(target=status_check)
187
+ status_thread.start()
188
+
189
+ returncode = proc.wait()
190
+ total_job_errors += proc.errors
191
+ status_thread.join()
192
+
193
+ self.collect_artifacts(step)
150
194
 
151
- sleep(5)
152
-
153
- status_thread = threading.Thread(target=status_check)
154
- status_thread.start()
155
-
156
- # Wait for proc to finish
157
- returncode = proc.wait()
158
- total_errors += proc.errors
159
-
160
- # Wait for status check
161
- status_thread.join()
162
-
163
- # Collect artifacts
164
- if "artifacts" in step:
165
- self.collect_artifacts(step)
166
-
167
- # Check if we have a good result
168
195
  if returncode > 0:
169
- conclusion = "failure"
170
- break
171
-
172
- if not conclusion and total_errors == 0:
173
- conclusion = "success"
174
- else:
175
- logger.error(f"Job failed with {total_errors} errors.")
176
- conclusion = "failure"
196
+ self.primitive.jobs.job_run_update(
197
+ self.job_id, status="request_completed", conclusion="failure"
198
+ )
199
+ logger.error(f"{self.job_slug} concluded with error(s)")
200
+ return
177
201
 
178
- self.conclude(conclusion)
202
+ if total_job_errors > 0:
203
+ self.primitive.jobs.job_run_update(
204
+ self.job_id, status="request_completed", conclusion="failure"
205
+ )
206
+ logger.error(f"{self.job_slug} concluded with error(s)")
207
+ return
179
208
 
180
- def conclude(self, conclusion: str) -> None:
181
209
  self.primitive.jobs.job_run_update(
182
- self.job_id, status="request_completed", conclusion=conclusion
183
- )
184
-
185
- logger.info(f"Completed {self.job_slug} job")
186
- logger.remove(self.logger_handle)
187
-
188
- def swap_logs(self, label: str):
189
- # Remove Handle
190
- if self.logger_handle:
191
- logger.remove(self.logger_handle)
192
-
193
- # Custom format for UTC time
194
- logger_format = (
195
- "<green>{time:YYYY-MM-DD HH:mm:ss.SSS!UTC}</green> | "
196
- "<green>{time:YYYY-MM-DD HH:mm:ss.SSS}</green> | "
197
- "<level>{level: <8}</level> | "
198
- "<cyan>{name}</cyan>:<cyan>{function}</cyan>:<cyan>{line}</cyan> - "
199
- "<level>{message}</level>"
200
- )
201
-
202
- self.logger_handle = logger.add(
203
- Path(self.logs_dir / f"{label}_{{time}}.primitive.log"),
204
- rotation=self.max_log_size,
205
- format=logger_format
210
+ self.job_id, status="request_completed", conclusion="success"
206
211
  )
212
+ logger.success(f"Completed {self.job_slug} job")
207
213
 
208
214
  def provision(self) -> Optional[Dict]:
209
215
  match self.job["provision"]:
@@ -220,7 +226,9 @@ class AgentRunner:
220
226
  return prov.create_env()
221
227
 
222
228
  def collect_artifacts(self, step: JobStep) -> None:
223
- # Search each artifact type
229
+ if "artifacts" not in step:
230
+ return
231
+
224
232
  for artifact in step["artifacts"]:
225
233
  files = find_files_for_extension(self.source_dir, artifact["extension"])
226
234
 
@@ -229,7 +237,7 @@ class AgentRunner:
229
237
  relative_path = PurePath(file).relative_to(self.source_dir)
230
238
 
231
239
  # Construct destination to preserve directory structure
232
- destination = Path(self.artifacts_dir / relative_path)
240
+ destination = Path(get_artifacts_cache(self.job_id) / relative_path)
233
241
 
234
242
  # Create directories if they don't exist
235
243
  destination.parent.mkdir(parents=True, exist_ok=True)
@@ -1,8 +1,8 @@
1
- import typing
2
- from typing import Dict
3
- import shutil
4
1
  import os
2
+ import shutil
3
+ import typing
5
4
  from pathlib import Path, PurePath
5
+ from typing import Dict
6
6
 
7
7
  from loguru import logger
8
8
 
@@ -45,12 +45,11 @@ class Uploader:
45
45
  )
46
46
 
47
47
  for file in files:
48
- response = self.primitive.files.upload_file_via_api(
48
+ result = self.primitive.files.upload_file_direct(
49
49
  path=file,
50
- key_prefix=str(PurePath(file).relative_to(cache.parent).parent),
51
- job_run_id=job_run_id,
50
+ key_prefix=str(PurePath(file).relative_to(cache.parent).parent)
52
51
  )
53
- upload_id = response.json()["data"]["fileUpload"]["id"]
52
+ upload_id = result.data["fileUpdate"]["id"]
54
53
 
55
54
  if upload_id:
56
55
  file_ids.append(upload_id)
primitive/cli.py CHANGED
@@ -13,11 +13,9 @@ from .files.commands import cli as file_commands
13
13
  from .git.commands import cli as git_commands
14
14
  from .hardware.commands import cli as hardware_commands
15
15
  from .jobs.commands import cli as jobs_commands
16
- from .lint.commands import cli as lint_commands
17
16
  from .organizations.commands import cli as organizations_commands
18
17
  from .projects.commands import cli as projects_commands
19
18
  from .reservations.commands import cli as reservations_commands
20
- from .sim.commands import cli as sim_commands
21
19
 
22
20
 
23
21
  @click.group()
@@ -65,14 +63,12 @@ cli.add_command(config_command, "config")
65
63
  cli.add_command(whoami_command, "whoami")
66
64
  cli.add_command(file_commands, "files")
67
65
  cli.add_command(hardware_commands, "hardware")
68
- cli.add_command(lint_commands, "lint")
69
66
  cli.add_command(agent_commands, "agent")
70
67
  cli.add_command(git_commands, "git")
71
68
  cli.add_command(daemons_commands, "daemons")
72
69
  cli.add_command(jobs_commands, "jobs")
73
70
  cli.add_command(organizations_commands, "organizations")
74
71
  cli.add_command(projects_commands, "projects")
75
- cli.add_command(sim_commands, "sim")
76
72
  cli.add_command(reservations_commands, "reservations")
77
73
  cli.add_command(exec_commands, "exec")
78
74
 
primitive/client.py CHANGED
@@ -1,5 +1,6 @@
1
1
  import sys
2
2
 
3
+ from gql import Client
3
4
  from loguru import logger
4
5
 
5
6
  from .agent.actions import Agent
@@ -10,12 +11,10 @@ from .files.actions import Files
10
11
  from .git.actions import Git
11
12
  from .hardware.actions import Hardware
12
13
  from .jobs.actions import Jobs
13
- from .lint.actions import Lint
14
14
  from .organizations.actions import Organizations
15
15
  from .projects.actions import Projects
16
16
  from .provisioning.actions import Provisioning
17
17
  from .reservations.actions import Reservations
18
- from .sim.actions import Sim
19
18
  from .utils.config import read_config_file
20
19
 
21
20
  logger.disable("primitive")
@@ -30,10 +29,10 @@ class Primitive:
30
29
  token: str = None,
31
30
  transport: str = None,
32
31
  ) -> None:
33
- self.host = host
34
- self.session = None
35
- self.DEBUG = DEBUG
36
- self.JSON = JSON
32
+ self.host: str = host
33
+ self.session: Client = None
34
+ self.DEBUG: bool = DEBUG
35
+ self.JSON: bool = JSON
37
36
 
38
37
  if self.DEBUG:
39
38
  logger.enable("primitive")
@@ -61,10 +60,8 @@ class Primitive:
61
60
  self.projects: Projects = Projects(self)
62
61
  self.jobs: Jobs = Jobs(self)
63
62
  self.files: Files = Files(self)
64
- self.sim: Sim = Sim(self)
65
63
  self.reservations: Reservations = Reservations(self)
66
64
  self.hardware: Hardware = Hardware(self)
67
- self.lint: Lint = Lint(self)
68
65
  self.agent: Agent = Agent(self)
69
66
  self.git: Git = Git(self)
70
67
  self.daemons: Daemons = Daemons(self)