codeapi-client 0.4.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
codeapi/types/_env.py ADDED
@@ -0,0 +1,65 @@
1
+ import json
2
+ from io import BytesIO, IOBase
3
+ from pathlib import Path
4
+ from typing import BinaryIO
5
+
6
+ import dotenv
7
+
8
+
9
+ class EnvVars(dict[str, str]):
10
+ def __init__(self, env_vars: dict[str, str] | str | None = None):
11
+ super().__init__()
12
+ if isinstance(env_vars, str):
13
+ self.update(json.loads(env_vars))
14
+ elif env_vars:
15
+ self.update(env_vars)
16
+
17
+ def __setitem__(self, key, value) -> None:
18
+ if not isinstance(key, str) or not isinstance(value, str):
19
+ raise TypeError("EnvVars keys and values must be strings")
20
+ super().__setitem__(key, value)
21
+
22
+ @property
23
+ def json_str(self) -> str:
24
+ return json.dumps(self)
25
+
26
+ def dump(self) -> str:
27
+ return self.json_str
28
+
29
+ def print(self) -> None:
30
+ print("\n".join([f"{k}={v}" for k, v in self.items()]))
31
+
32
+ @staticmethod
33
+ def _replace_none_values(dotenv_values: dict[str, str | None]) -> dict[str, str]:
34
+ return {k: ("" if v is None else v) for k, v in dotenv_values.items()}
35
+
36
+ @classmethod
37
+ def from_bytes(cls, data_bytes: bytes | BinaryIO) -> "EnvVars":
38
+ if isinstance(data_bytes, (BinaryIO, IOBase)):
39
+ data_bytes.seek(0)
40
+ data_bytes = data_bytes.read()
41
+ dotenv_values = json.loads(data_bytes)
42
+ return cls(cls._replace_none_values(dotenv_values))
43
+
44
+ @classmethod
45
+ def from_str(cls, json_str: str) -> "EnvVars":
46
+ return cls(json.loads(json_str))
47
+
48
+ @classmethod
49
+ def from_file(cls, path: Path | str) -> "EnvVars":
50
+ dotenv_values = dotenv.dotenv_values(dotenv_path=path)
51
+ return cls(cls._replace_none_values(dotenv_values))
52
+
53
+ @classmethod
54
+ def from_dict(cls, env_vars: dict[str, str]) -> "EnvVars":
55
+ return cls(env_vars)
56
+
57
+ def to_file(self, path: Path | str) -> None:
58
+ data = [f"{k}={v}" for k, v in self.items()]
59
+ Path(path).write_text("\n".join(data))
60
+
61
+ def to_bytesio(self) -> BytesIO:
62
+ return BytesIO(json.dumps(self).encode())
63
+
64
+ def to_dict(self) -> dict[str, str]:
65
+ return dict(self)
codeapi/types/_exc.py ADDED
@@ -0,0 +1,35 @@
1
+ from fastapi import HTTPException
2
+
3
+ from ._enums import CodeAPIError
4
+
5
+
6
class CodeAPIException(HTTPException):
    """HTTPException carrying a :class:`CodeAPIError` and a human-readable message."""

    # Maps each domain error to its HTTP status code; anything unmapped
    # falls back to 500 (internal server error).
    _STATUS_BY_ERROR = {
        CodeAPIError.CODE_NOT_FOUND: 404,
        CodeAPIError.JOB_FAILED: 500,
        CodeAPIError.JOB_NOT_FINISHED: 400,
        CodeAPIError.JOB_NOT_FOUND: 404,
        CodeAPIError.JOB_STILL_QUEUED: 503,
        CodeAPIError.JOB_TIMED_OUT: 408,
        CodeAPIError.NO_ASSOCIATED_CODE_ID: 404,
        CodeAPIError.NO_JOB_META: 404,
        CodeAPIError.NO_STORAGE_BACKEND: 501,
    }

    def __init__(self, error: CodeAPIError, message: str = ""):
        self.error: CodeAPIError = error
        self.message: str = message
        super().__init__(
            status_code=self._get_status_code(),
            detail={"error": self.error.name, "message": message},
        )

    def _get_status_code(self) -> int:
        """Return the HTTP status code corresponding to this error."""
        return self._STATUS_BY_ERROR.get(self.error, 500)
codeapi/types/_job.py ADDED
@@ -0,0 +1,67 @@
1
+ from pydantic import BaseModel, Field
2
+
3
+ from ._enums import JobStage, JobStatus, JobType
4
+
5
+
6
class Job(BaseModel):
    """State record for a submitted job, with convenience status predicates."""

    job_id: str
    job_type: JobType
    job_status: JobStatus
    job_error: str | None = Field(default=None)
    # Excluded from repr/str output (tracebacks can be long).
    job_traceback: str | None = Field(default=None, repr=False)
    code_id: str | None = None

    @property
    def job_stage(self) -> JobStage:
        """Coarse lifecycle stage derived from the current status."""
        return self.job_status.stage

    @property
    def is_queued(self) -> bool:
        # BUGFIX: the original compared job_stage (a JobStage) against
        # JobStatus.QUEUED; equality between members of different Enum
        # types is always False, so this property could never be True.
        return self.job_status == JobStatus.QUEUED

    @property
    def is_scheduled(self) -> bool:
        # BUGFIX: same cross-enum comparison bug as is_queued.
        return self.job_status == JobStatus.SCHEDULED

    @property
    def is_pre_running(self) -> bool:
        return self.job_stage == JobStage.PRE_RUNNING

    @property
    def is_running(self) -> bool:
        return self.job_stage == JobStage.RUNNING

    @property
    def is_post_running(self) -> bool:
        return self.job_stage == JobStage.POST_RUNNING

    @property
    def has_finished(self) -> bool:
        return self.job_status == JobStatus.FINISHED

    @property
    def has_failed(self) -> bool:
        # Any post-running terminal state other than FINISHED counts as failure.
        return self.is_post_running and not self.has_finished

    @property
    def has_timed_out(self) -> bool:
        return self.job_status == JobStatus.TIMEOUT

    @property
    def has_been_stopped(self) -> bool:
        return self.job_status == JobStatus.STOPPED

    @property
    def has_been_canceled(self) -> bool:
        return self.job_status == JobStatus.CANCELED

    @property
    def has_been_deferred(self) -> bool:
        return self.job_status == JobStatus.DEFERRED

    def __str__(self):
        # Mirrors pydantic's repr but honors Field(repr=False) exclusions.
        fields = self.__class__.__pydantic_fields__
        field_values = ", ".join(
            f"{k}={repr(v)}" for k, v in self.model_dump().items() if fields[k].repr
        )
        return f"{self.__class__.__name__}({field_values})"
codeapi/types/_json.py ADDED
@@ -0,0 +1,67 @@
1
+ import json
2
+ from io import BytesIO, IOBase
3
+ from pathlib import Path
4
+ from typing import Any, BinaryIO, OrderedDict
5
+
6
+
7
+ class JsonData(dict):
8
+ def __init__(self, data: dict | Path | str | bytes | None = None):
9
+ super().__init__()
10
+ if data is None:
11
+ pass
12
+ elif isinstance(data, (str, bytes)):
13
+ self.update(json.loads(data))
14
+ elif isinstance(data, Path):
15
+ if data.is_file():
16
+ self.update(json.loads(data.read_text()))
17
+ else:
18
+ raise ValueError(f"{data} is not a file")
19
+ elif isinstance(data, dict):
20
+ self.update(data)
21
+ else:
22
+ raise TypeError("Invalid data type for JsonData")
23
+
24
+ @property
25
+ def json_str(self) -> str:
26
+ return json.dumps(self)
27
+
28
+ def dump(self) -> str:
29
+ return self.json_str
30
+
31
+ def print(self, indent: int = 4) -> None:
32
+ print(json.dumps(self, indent=indent))
33
+
34
+ def add_key(self, key: Any, value: Any, prepend: bool = False) -> None:
35
+ new_data = OrderedDict()
36
+ if prepend:
37
+ new_data[key] = value
38
+ new_data.update(self)
39
+ else:
40
+ new_data.update(self)
41
+ new_data[key] = value
42
+ self.clear()
43
+ self.update(new_data)
44
+
45
+ def remove_key(self, key: Any) -> None:
46
+ self.pop(key, None)
47
+
48
+ @classmethod
49
+ def from_file(cls, path: Path | str) -> "JsonData":
50
+ return cls(json.loads(Path(path).read_text()))
51
+
52
+ @classmethod
53
+ def from_str(cls, json_str: str) -> "JsonData":
54
+ return cls(json_str)
55
+
56
+ @classmethod
57
+ def from_bytes(cls, data_bytes: bytes | BinaryIO) -> "JsonData":
58
+ if isinstance(data_bytes, (BinaryIO, IOBase)):
59
+ data_bytes.seek(0)
60
+ data_bytes = data_bytes.read()
61
+ return cls(json.loads(data_bytes))
62
+
63
+ def to_file(self, path: Path | str) -> None:
64
+ Path(path).write_text(json.dumps(self, indent=4))
65
+
66
+ def to_bytesio(self) -> BytesIO:
67
+ return BytesIO(json.dumps(self).encode())
@@ -0,0 +1,36 @@
1
+ from datetime import datetime
2
+ from typing import Any, Literal
3
+
4
+ from pydantic import BaseModel, field_validator
5
+
6
+
7
class StreamOutput(BaseModel):
    """One chunk of captured stdout/stderr output, keyed by a stream id."""

    # sid is "<milliseconds>-<sequence>" — presumably a Redis stream id;
    # confirm against the producer.
    sid: str
    stream: Literal["stdout", "stderr"]
    text: str

    @property
    def timestamp(self) -> float:
        """Epoch seconds derived from the sid's millisecond prefix."""
        millis, _, _ = self.sid.partition("-")
        return int(millis) / 1000

    @property
    def datetime(self) -> datetime:
        """:attr:`timestamp` as a (naive, local-time) datetime."""
        return datetime.fromtimestamp(self.timestamp)

    @property
    def seq_number(self) -> int:
        """Sequence component (after the '-') of the sid."""
        return int(self.sid.split("-")[1])

    @field_validator("stream", "text", mode="before")
    @classmethod
    def decode_bytes(cls, v: Any) -> str:
        """Accept raw bytes for these fields by decoding to str first."""
        return v.decode() if isinstance(v, bytes) else v

    def __str__(self):
        # Mirrors pydantic's repr but honors Field(repr=False) exclusions.
        cls = self.__class__
        parts = [
            f"{name}={value!r}"
            for name, value in self.model_dump().items()
            if cls.__pydantic_fields__[name].repr
        ]
        return f"{cls.__name__}({', '.join(parts)})"
@@ -0,0 +1,85 @@
1
+ from __future__ import annotations
2
+
3
+ from typing import Literal
4
+
5
+ import psutil
6
+ from pydantic import BaseModel, Field
7
+
8
+
9
class HiveHeartbeat(BaseModel):
    """Point-in-time health snapshot of a single hive host.

    Use :meth:`create` to sample live system metrics via psutil.
    """

    hostname: str
    host: str
    timestamp: float  # epoch seconds, supplied by the caller
    status: Literal["alive", "busy", "idle", "dead"]

    cpu_count: int = Field(description="Number of CPU cores")
    cpu_usage: float = Field(description="Current CPU usage percentage")

    memory_total: float = Field(description="Total RAM in GB")
    memory_used: float = Field(description="Used RAM in GB")
    memory_free: float = Field(description="Free RAM in GB")
    memory_usage: float = Field(description="Memory usage percentage")

    disk_total: float = Field(description="Total disk space in GB")
    disk_used: float = Field(description="Used disk space in GB")
    disk_free: float = Field(description="Free disk space in GB")
    disk_usage: float = Field(description="Disk usage percentage")

    load_avg_1min: float | None = Field(
        default=None, description="1-minute load average"
    )
    load_avg_5min: float | None = Field(
        default=None, description="5-minute load average"
    )
    load_avg_15min: float | None = Field(
        default=None, description="15-minute load average"
    )

    @classmethod
    def create(
        cls,
        hostname: str,
        host: str,
        timestamp: float,
        status: Literal["alive", "busy", "idle", "dead"],
    ) -> HiveHeartbeat:
        """Sample the local machine with psutil and build a heartbeat.

        NOTE: ``psutil.cpu_percent(interval=1)`` blocks the caller for
        roughly one second while it measures.
        """
        cpu_usage = psutil.cpu_percent(interval=1)
        memory = psutil.virtual_memory()
        disk = psutil.disk_usage("/")

        try:
            # getloadavg is unavailable (AttributeError) or can fail
            # (OSError) on some platforms, e.g. Windows.
            load_1, load_5, load_15 = psutil.getloadavg()
        except (AttributeError, OSError):
            load_1 = load_5 = load_15 = None

        gib = 1024**3  # bytes per GiB, hoisted for the conversions below
        return cls(
            hostname=hostname,
            host=host,
            timestamp=timestamp,
            status=status,
            # BUGFIX: psutil.cpu_count() may return None when the count is
            # undeterminable, which would fail pydantic's int validation;
            # report 0 to mean "unknown".
            cpu_count=psutil.cpu_count() or 0,
            cpu_usage=cpu_usage,
            memory_total=memory.total / gib,
            memory_used=memory.used / gib,
            # "free" here is psutil's *available* memory, not raw free.
            memory_free=memory.available / gib,
            memory_usage=memory.percent,
            disk_total=disk.total / gib,
            disk_used=disk.used / gib,
            disk_free=disk.free / gib,
            disk_usage=disk.percent,
            load_avg_1min=load_1,
            load_avg_5min=load_5,
            load_avg_15min=load_15,
        )
75
+
76
+
77
class SwarmStats(BaseModel):
    """Aggregate statistics for a swarm of hives.

    All fields default to zero so an empty swarm still yields a valid
    object. NOTE(review): presumably rolled up from HiveHeartbeat
    reports (counts by status, summed cores/memory, averaged usage) —
    confirm against the aggregating caller.
    """

    total_hives: int = 0
    active_hives: int = 0
    busy_hives: int = 0
    idle_hives: int = 0
    total_cpu_cores: int = 0
    avg_cpu_usage: float = 0.0
    total_memory_gb: float = 0.0
    avg_memory_usage: float = 0.0
codeapi/types/_time.py ADDED
@@ -0,0 +1,46 @@
1
+ from enum import IntEnum
2
+
3
+
4
class T_SECONDS(IntEnum):
    """Common durations expressed in seconds.

    Month-based members use calendar approximations (30/90/180/365 days).
    """

    ONE_MINUTE = 60
    ONE_HOUR = 3600
    ONE_DAY = 86400
    ONE_WEEK = 604800
    TWO_WEEKS = 1209600
    FOUR_WEEKS = 2419200
    SIX_WEEKS = 3628800  # 42 days
    ONE_MONTH = 2592000  # 30 days
    THREE_MONTHS = 7776000  # 90 days
    SIX_MONTHS = 15552000  # 180 days
    ONE_YEAR = 31536000  # 365 days
    TEN_YEARS = 315360000
    INFINITE = 2147483647  # INT32_MAX — effectively unbounded
18
+
19
+
20
class T_MINUTES(IntEnum):
    """Common durations expressed in minutes.

    Month-based members use calendar approximations (30/90/180/365 days).
    """

    ONE_HOUR = 60
    ONE_DAY = 1440
    ONE_WEEK = 10080
    TWO_WEEKS = 20160
    FOUR_WEEKS = 40320
    SIX_WEEKS = 60480  # 42 days
    ONE_MONTH = 43200  # 30 days
    THREE_MONTHS = 129600  # 90 days
    SIX_MONTHS = 259200  # 180 days
    ONE_YEAR = 525600  # 365 days
    TEN_YEARS = 5256000
    INFINITE = 2147483647  # INT32_MAX — same raw sentinel as T_SECONDS.INFINITE
33
+
34
+
35
class T_HOURS(IntEnum):
    """Common durations expressed in hours.

    Month-based members use calendar approximations (30/90/180/365 days).
    """

    ONE_DAY = 24
    ONE_WEEK = 168
    TWO_WEEKS = 336
    FOUR_WEEKS = 672
    SIX_WEEKS = 1008  # 42 days
    ONE_MONTH = 720  # 30 days
    THREE_MONTHS = 2160  # 90 days
    SIX_MONTHS = 4320  # 180 days
    ONE_YEAR = 8760  # 365 days
    TEN_YEARS = 87600
    INFINITE = 2147483647  # INT32_MAX — same raw sentinel as T_SECONDS.INFINITE