arcade-core 3.0.0__py3-none-any.whl → 3.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- arcade_core/catalog.py +4 -4
- arcade_core/constants.py +6 -0
- arcade_core/usage/__init__.py +5 -0
- arcade_core/usage/__main__.py +63 -0
- arcade_core/usage/constants.py +34 -0
- arcade_core/usage/identity.py +208 -0
- arcade_core/usage/usage_service.py +104 -0
- arcade_core/usage/utils.py +13 -0
- {arcade_core-3.0.0.dist-info → arcade_core-3.1.0.dist-info}/METADATA +1 -1
- {arcade_core-3.0.0.dist-info → arcade_core-3.1.0.dist-info}/RECORD +11 -4
- {arcade_core-3.0.0.dist-info → arcade_core-3.1.0.dist-info}/WHEEL +0 -0
arcade_core/catalog.py
CHANGED
|
@@ -217,7 +217,7 @@ class ToolCatalog(BaseModel):
|
|
|
217
217
|
toolkit_name = toolkit_or_name
|
|
218
218
|
|
|
219
219
|
if not toolkit_name:
|
|
220
|
-
raise ValueError("A
|
|
220
|
+
raise ValueError("A server name or server must be provided.")
|
|
221
221
|
|
|
222
222
|
definition = ToolCatalog.create_tool_definition(
|
|
223
223
|
tool_func,
|
|
@@ -230,7 +230,7 @@ class ToolCatalog(BaseModel):
|
|
|
230
230
|
|
|
231
231
|
if fully_qualified_name in self._tools:
|
|
232
232
|
raise ToolkitLoadError(
|
|
233
|
-
f"Tool '{definition.name}' in
|
|
233
|
+
f"Tool '{definition.name}' in server '{toolkit_name}' already exists in the catalog."
|
|
234
234
|
)
|
|
235
235
|
|
|
236
236
|
if str(fully_qualified_name).lower() in self._disabled_tools:
|
|
@@ -238,7 +238,7 @@ class ToolCatalog(BaseModel):
|
|
|
238
238
|
return
|
|
239
239
|
|
|
240
240
|
if str(toolkit_name).lower() in self._disabled_toolkits:
|
|
241
|
-
logger.info(f"
|
|
241
|
+
logger.info(f"Server '{toolkit_name!s}' is disabled and will not be cataloged.")
|
|
242
242
|
return
|
|
243
243
|
|
|
244
244
|
self._tools[fully_qualified_name] = MaterializedTool(
|
|
@@ -267,7 +267,7 @@ class ToolCatalog(BaseModel):
|
|
|
267
267
|
"""
|
|
268
268
|
|
|
269
269
|
if str(toolkit).lower() in self._disabled_toolkits:
|
|
270
|
-
logger.info(f"
|
|
270
|
+
logger.info(f"Server '{toolkit.name!s}' is disabled and will not be cataloged.")
|
|
271
271
|
return
|
|
272
272
|
|
|
273
273
|
for module_name, tool_names in toolkit.tools.items():
|
arcade_core/constants.py
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
import os

# Directory that holds the Arcade configuration files. Defaults to ~/.arcade;
# the ARCADE_WORK_DIR environment variable overrides the home component.
_work_dir = os.path.expanduser(os.getenv("ARCADE_WORK_DIR", "~"))
ARCADE_CONFIG_PATH = os.path.join(_work_dir, ".arcade")

# File holding the user's Arcade-related credentials (e.g., ARCADE_API_KEY),
# stored inside the configuration directory.
CREDENTIALS_FILE_PATH = os.path.join(ARCADE_CONFIG_PATH, "credentials.yaml")
|
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
"""Entry point for detached usage tracking subprocess.
|
|
2
|
+
|
|
3
|
+
This module is invoked as `python -m arcade_core.usage` and expects
|
|
4
|
+
event data to be passed via the ARCADE_USAGE_EVENT_DATA environment variable.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import json
|
|
8
|
+
import os
|
|
9
|
+
import threading
|
|
10
|
+
|
|
11
|
+
from posthog import Posthog
|
|
12
|
+
|
|
13
|
+
from arcade_core.usage.constants import (
|
|
14
|
+
ARCADE_USAGE_EVENT_DATA,
|
|
15
|
+
MAX_RETRIES_POSTHOG,
|
|
16
|
+
PROP_PROCESS_PERSON_PROFILE,
|
|
17
|
+
TIMEOUT_POSTHOG_CAPTURE,
|
|
18
|
+
TIMEOUT_SUBPROCESS_EXIT,
|
|
19
|
+
)
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def _timeout_exit() -> None:
    """Hard-kill this helper process once the watchdog timer fires."""
    # os._exit skips interpreter cleanup, which is exactly what a
    # watchdog kill wants: no hangs in atexit handlers or flushes.
    os._exit(1)
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
def main() -> None:
    """Read the serialized event from the environment and send it to PostHog."""
    # Watchdog: force-exit this subprocess if the capture hangs.
    watchdog = threading.Timer(TIMEOUT_SUBPROCESS_EXIT, _timeout_exit)
    watchdog.daemon = True
    watchdog.start()

    try:
        payload = json.loads(os.environ[ARCADE_USAGE_EVENT_DATA])

        # Anonymous events must not create a PostHog person profile.
        if payload.get("is_anon", False):
            payload["properties"][PROP_PROCESS_PERSON_PROFILE] = False

        client = Posthog(
            project_api_key=payload["api_key"],
            host=payload["host"],
            timeout=TIMEOUT_POSTHOG_CAPTURE,
            max_retries=MAX_RETRIES_POSTHOG,
        )
        client.capture(
            payload["event_name"],
            distinct_id=payload["distinct_id"],
            properties=payload["properties"],
        )
        client.flush()
    except Exception:
        # Best-effort telemetry: swallow every error silently so the
        # tracking subprocess can never disrupt anything.
        pass
    finally:
        watchdog.cancel()


if __name__ == "__main__":
    main()
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
# ---- Common event property names -------------------------------------------
PROP_RUNTIME_LANGUAGE = "runtime_language"
PROP_RUNTIME_VERSION = "runtime_version"
PROP_OS_TYPE = "os_type"
PROP_OS_RELEASE = "os_release"
PROP_DURATION_MS = "duration_ms"
PROP_ERROR_MESSAGE = "error_message"
PROP_DEVICE_MONOTONIC_START = "device_start_timestamp"
PROP_DEVICE_MONOTONIC_END = "device_end_timestamp"
PROP_DEVICE_TIMESTAMP = "device_timestamp"
# PostHog special property; set to False for anonymous events only.
PROP_PROCESS_PERSON_PROFILE = "$process_person_profile"

# ---- Keys stored in the usage identity file ---------------------------------
KEY_ANON_ID = "anon_id"
KEY_LINKED_PRINCIPAL_ID = "linked_principal_id"

# ---- File names --------------------------------------------------------------
USAGE_FILE_NAME = "usage.json"

# ---- Environment variables ----------------------------------------------------
# Carries the JSON-serialized event payload to the tracking subprocess.
ARCADE_USAGE_EVENT_DATA = "ARCADE_USAGE_EVENT_DATA"
# Opt-out switch for usage tracking: 1 (default) enables, 0 disables.
ARCADE_USAGE_TRACKING = "ARCADE_USAGE_TRACKING"

# ---- Timeouts (seconds) and retry limits --------------------------------------
TIMEOUT_POSTHOG_ALIAS = 2
TIMEOUT_POSTHOG_CAPTURE = 5
TIMEOUT_ARCADE_API = 2.0
TIMEOUT_SUBPROCESS_EXIT = 10.0
MAX_RETRIES_POSTHOG = 1
|
|
@@ -0,0 +1,208 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Identity management for PostHog analytics tracking.
|
|
3
|
+
|
|
4
|
+
Handles anonymous/authenticated identity tracking with PostHog aliasing,
|
|
5
|
+
supporting pre-login anonymous tracking, post-login identity stitching,
|
|
6
|
+
and logout identity rotation.
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
import fcntl
|
|
10
|
+
import json
|
|
11
|
+
import os
|
|
12
|
+
import tempfile
|
|
13
|
+
import uuid
|
|
14
|
+
from typing import Any
|
|
15
|
+
|
|
16
|
+
import httpx
|
|
17
|
+
import yaml
|
|
18
|
+
|
|
19
|
+
from arcade_core.constants import ARCADE_CONFIG_PATH, CREDENTIALS_FILE_PATH
|
|
20
|
+
from arcade_core.usage.constants import (
|
|
21
|
+
KEY_ANON_ID,
|
|
22
|
+
KEY_LINKED_PRINCIPAL_ID,
|
|
23
|
+
TIMEOUT_ARCADE_API,
|
|
24
|
+
USAGE_FILE_NAME,
|
|
25
|
+
)
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
class UsageIdentity:
    """Manages user identity for PostHog analytics tracking.

    Persists an anonymous UUID (and, once known, the authenticated
    principal_id) in a usage.json file inside the Arcade config directory
    so events from the same machine can be stitched across CLI runs.

    NOTE(review): the module imports ``fcntl`` unconditionally while the
    locking calls below are guarded by ``os.name != "nt"`` — confirm this
    module is never imported on Windows, where ``import fcntl`` fails.
    """

    def __init__(self) -> None:
        # Full path to the usage.json identity store.
        self.usage_file_path = os.path.join(ARCADE_CONFIG_PATH, USAGE_FILE_NAME)
        # Lazily-populated in-memory cache of the parsed usage file.
        self._data: dict[str, Any] | None = None

    def load_or_create(self) -> dict[str, Any]:
        """Load or create usage.json file with atomic writes and file locking.

        Returns:
            dict: The usage data containing anon_id and optionally linked_principal_id
        """
        # Serve from the in-memory cache after the first successful load.
        if self._data is not None:
            return self._data

        os.makedirs(ARCADE_CONFIG_PATH, exist_ok=True)

        if os.path.exists(self.usage_file_path):
            try:
                with open(self.usage_file_path) as f:
                    # Shared (read) lock so a concurrent writer holding the
                    # exclusive lock cannot hand us a half-written file.
                    if os.name != "nt":  # Unix-like systems
                        fcntl.flock(f.fileno(), fcntl.LOCK_SH)
                    try:
                        data = json.load(f)
                        # Accept only well-formed payloads; anything else
                        # falls through to regeneration below.
                        if isinstance(data, dict) and KEY_ANON_ID in data:
                            self._data = data
                            return self._data
                    finally:
                        # unlock file
                        if os.name != "nt":
                            fcntl.flock(f.fileno(), fcntl.LOCK_UN)
            except Exception:  # noqa: S110
                # Unreadable or corrupt file: silently regenerate below.
                pass

        # No usable file on disk: mint a fresh anonymous identity.
        new_data = {KEY_ANON_ID: str(uuid.uuid4()), KEY_LINKED_PRINCIPAL_ID: None}

        self._write_atomic(new_data)
        self._data = new_data
        return self._data

    def _write_atomic(self, data: dict[str, Any]) -> None:
        """Write data atomically to usage.json file.

        Writes to a temp file in the same directory (so the rename stays on
        one filesystem) and then renames it over the target.

        Args:
            data: The data to write to the usage file
        """
        # Create temp file in same directory for atomic rename
        temp_fd, temp_path = tempfile.mkstemp(
            dir=ARCADE_CONFIG_PATH, prefix=".usage_", suffix=".tmp"
        )

        try:
            with os.fdopen(temp_fd, "w") as f:
                # Exclusive lock while writing; readers take LOCK_SH above.
                if os.name != "nt":  # Unix-like systems
                    fcntl.flock(f.fileno(), fcntl.LOCK_EX)
                try:
                    json.dump(data, f, indent=2)
                    f.flush()
                    os.fsync(f.fileno())  # ensure data is written to disk
                finally:
                    if os.name != "nt":
                        fcntl.flock(f.fileno(), fcntl.LOCK_UN)

            # Atomic replacement on POSIX when both paths share a filesystem.
            os.rename(temp_path, self.usage_file_path)
        except Exception:
            # clean up the temp file before propagating the error
            import contextlib

            with contextlib.suppress(OSError):
                os.unlink(temp_path)
            raise

    def get_distinct_id(self) -> str:
        """Get distinct_id based on authentication state.

        We use principal_id for authenticated users and anon_id for anonymous users.

        Returns:
            str: Principal ID if authenticated, otherwise anon_id
        """
        data = self.load_or_create()

        # Check if we have a persisted principal_id first (avoids an API call).
        linked_principal_id = data.get(KEY_LINKED_PRINCIPAL_ID)
        if linked_principal_id:
            return str(linked_principal_id)

        # Try to fetch principal_id from API if not persisted
        principal_id = self.get_principal_id()
        if principal_id:
            return principal_id

        # Fall back to anon_id if not authenticated
        return str(data[KEY_ANON_ID])

    def get_principal_id(self) -> str | None:
        """Fetch principal_id from Arcade Cloud API.

        Reads the API key from the local credentials file and validates it
        against the cloud auth endpoint with a short timeout.

        Returns:
            str | None: Principal ID if authenticated and API call succeeds, None otherwise
        """
        if not os.path.exists(CREDENTIALS_FILE_PATH):
            return None

        try:
            with open(CREDENTIALS_FILE_PATH) as f:
                config = yaml.safe_load(f)

            # Credentials layout used here: {"cloud": {"api": {"key": ...}}}
            cloud_config = config.get("cloud", {})
            api_key = cloud_config.get("api", {}).get("key")

            if not api_key:
                return None

            response = httpx.get(
                "https://cloud.arcade.dev/api/v1/auth/validate",
                headers={"accept": "application/json", "Authorization": f"Bearer {api_key}"},
                timeout=TIMEOUT_ARCADE_API,
            )

            if response.status_code == 200:
                data = response.json()
                principal_id = data.get("data", {}).get("principal_id")
                return str(principal_id) if principal_id else None

        except Exception:  # noqa: S110
            # Silent failure - don't disrupt CLI
            pass

        return None

    def should_alias(self) -> bool:
        """Check if PostHog alias is needed.

        Alias is needed when the user is authenticated,
        but the retrieved principal_id doesn't match the persisted linked_principal_id

        Returns:
            bool: True if user is authenticated but not yet aliased
        """
        data = self.load_or_create()
        principal_id = self.get_principal_id()

        return principal_id is not None and principal_id != data.get(KEY_LINKED_PRINCIPAL_ID)

    def reset_to_anonymous(self) -> None:
        """Generate new anonymous ID and clear linked principal_id.

        Used after logout to prevent cross-contamination between multiple
        accounts on the same machine
        """
        # Create fresh data with only anon_id
        new_data = {KEY_ANON_ID: str(uuid.uuid4()), KEY_LINKED_PRINCIPAL_ID: None}

        self._write_atomic(new_data)
        self._data = new_data

    def set_linked_principal_id(self, principal_id: str) -> None:
        """Update linked_principal_id in usage.json.

        Args:
            principal_id: The principal_id to link to the current anon_id
        """
        data = self.load_or_create()
        data[KEY_LINKED_PRINCIPAL_ID] = principal_id

        self._write_atomic(data)
        self._data = data

    @property
    def anon_id(self) -> str:
        """Get the current anonymous ID.

        Returns:
            str: The anonymous ID
        """
        data = self.load_or_create()
        return str(data[KEY_ANON_ID])
|
|
@@ -0,0 +1,104 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import os
|
|
3
|
+
import subprocess
|
|
4
|
+
import sys
|
|
5
|
+
|
|
6
|
+
from arcade_core.usage.constants import (
|
|
7
|
+
ARCADE_USAGE_EVENT_DATA,
|
|
8
|
+
MAX_RETRIES_POSTHOG,
|
|
9
|
+
TIMEOUT_POSTHOG_ALIAS,
|
|
10
|
+
)
|
|
11
|
+
from arcade_core.usage.utils import is_tracking_enabled
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class UsageService:
    """Sends Arcade usage analytics to PostHog without blocking the CLI."""

    def __init__(self) -> None:
        # Write-only PostHog project key and US ingestion host.
        self.api_key = "phc_hIqUQyJpf2TP4COePO5jEpkGeUXipa7KqTEyDeRsTmB"
        self.host = "https://us.i.posthog.com"

    def alias(self, previous_id: str, distinct_id: str) -> None:
        """Perform PostHog alias synchronously (blocking).

        Must be called BEFORE the first event with the new distinct_id;
        running it synchronously is what guarantees that ordering.

        Args:
            previous_id: The previous distinct_id (usually anon_id)
            distinct_id: The new distinct_id (usually email)
        """
        if not is_tracking_enabled():
            return

        try:
            from posthog import Posthog

            client = Posthog(
                project_api_key=self.api_key,
                host=self.host,
                timeout=TIMEOUT_POSTHOG_ALIAS,
                max_retries=MAX_RETRIES_POSTHOG,
            )
            client.alias(previous_id=previous_id, distinct_id=distinct_id)
            client.flush()
        except Exception:  # noqa: S110
            # Silent failure - don't disrupt CLI
            pass

    def capture(
        self, event_name: str, distinct_id: str, properties: dict, is_anon: bool = False
    ) -> None:
        """Capture event in a detached subprocess that is non-blocking.

        Spawns a fully independent subprocess that keeps running even after
        the parent CLI process exits. Works cross-platform.

        Args:
            event_name: Name of the event to capture
            distinct_id: The distinct_id for the user
            properties: Event properties
            is_anon: Whether this is an anonymous user (sets $process_person_profile to false)
        """
        if not is_tracking_enabled():
            return

        payload = json.dumps({
            "event_name": event_name,
            "properties": properties,
            "distinct_id": distinct_id,
            "api_key": self.api_key,
            "host": self.host,
            "is_anon": is_anon,
        })

        # Hand the payload to the child via the environment (portable).
        child_env = os.environ.copy()
        child_env[ARCADE_USAGE_EVENT_DATA] = payload

        popen_kwargs: dict = {
            "stdin": subprocess.DEVNULL,
            "stdout": subprocess.DEVNULL,
            "stderr": subprocess.DEVNULL,
            "close_fds": True,
            "env": child_env,
        }
        if sys.platform == "win32":
            # Windows: fully detach from the parent console.
            DETACHED_PROCESS = 0x00000008
            CREATE_NEW_PROCESS_GROUP = 0x00000200
            popen_kwargs["creationflags"] = DETACHED_PROCESS | CREATE_NEW_PROCESS_GROUP
        else:
            # Unix: new session detaches the child from the terminal.
            popen_kwargs["start_new_session"] = True

        subprocess.Popen([sys.executable, "-m", "arcade_core.usage"], **popen_kwargs)
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
import os
|
|
2
|
+
|
|
3
|
+
from arcade_core.usage.constants import ARCADE_USAGE_TRACKING
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
def is_tracking_enabled() -> bool:
    """Check if usage tracking is enabled via environment variable.

    Returns:
        bool: True if tracking is enabled (default), False if explicitly disabled.
    """
    # Any of these values (case-insensitive) switches tracking off;
    # everything else, including an unset variable, leaves it on.
    opt_out_values = ("false", "0", "no", "off")
    return os.environ.get(ARCADE_USAGE_TRACKING, "1").lower() not in opt_out_values
|
|
@@ -1,9 +1,10 @@
|
|
|
1
1
|
arcade_core/__init__.py,sha256=1heu3AROAjpistehPzY2H-2nkj_IjQEh-vVlVOCRF1E,88
|
|
2
2
|
arcade_core/annotations.py,sha256=Nst6aejLWXlpTu7GwzWETu1gQCG1XVAUR_qcFbNvyRc,198
|
|
3
3
|
arcade_core/auth.py,sha256=On9sJPOxvHjKBxgKC1yqp7oijF6KYBsG6fG8KUw-9OY,5882
|
|
4
|
-
arcade_core/catalog.py,sha256=
|
|
4
|
+
arcade_core/catalog.py,sha256=yuGYai8b-ruN4Ot5p8DyRMOSoxy-ciVgL56hQMJSZrk,41814
|
|
5
5
|
arcade_core/config.py,sha256=e98XQAkYySGW9T_yrJg54BB8Wuq06GPVHp7xqe2d1vU,572
|
|
6
6
|
arcade_core/config_model.py,sha256=78BR6Ch9BDuG4ddWGfpuEKqWcb1fyOF6kxiF4qLFogM,4481
|
|
7
|
+
arcade_core/constants.py,sha256=wakdklI7TyJ0agq9n-Cmb2lbVa95D0oUaMGm30eiv9Y,375
|
|
7
8
|
arcade_core/context.py,sha256=J2MgbVznhJC2qarHq3dTL72W4NGYOM1pjXdI_YwgkA4,3316
|
|
8
9
|
arcade_core/discovery.py,sha256=PluKGhNtJ7RYjJuPDMB8LCNinQLKzlqoAtc3dwKb6IA,8397
|
|
9
10
|
arcade_core/errors.py,sha256=fsi7m6TQQSsdSNHl4rBoSN_YH3ZV910gjvBFqB207f4,13326
|
|
@@ -16,6 +17,12 @@ arcade_core/toolkit.py,sha256=UcZ151pC8zfIFzVMYxaq31H7M0f-2qprU0PkVAzfRtI,13815
|
|
|
16
17
|
arcade_core/utils.py,sha256=RxVIzURTtZ4nAWYB3FYGngqMMPmBBxf330Ez9eEoXaw,3109
|
|
17
18
|
arcade_core/version.py,sha256=CpXi3jGlx23RvRyU7iytOMZrnspdWw4yofS8lpP1AJU,18
|
|
18
19
|
arcade_core/converters/openai.py,sha256=4efdgTkvdwT44VGStBhdUmzCnoP5dysceIqPVVPG-vk,7408
|
|
19
|
-
arcade_core
|
|
20
|
-
arcade_core
|
|
21
|
-
arcade_core
|
|
20
|
+
arcade_core/usage/__init__.py,sha256=SUR5mqF-bjdbl-P-OOHN6OFAjXZu4agXyPhr7xdVXCw,234
|
|
21
|
+
arcade_core/usage/__main__.py,sha256=rSJkE1G9hlV3HRRA6EJE5Lmy3wKyan7rAxBXHX9A1cI,1577
|
|
22
|
+
arcade_core/usage/constants.py,sha256=1FQIhkFFMZUhU-H4A7GvMb7KQ3qLFrNAZb2-LEvSF3k,1052
|
|
23
|
+
arcade_core/usage/identity.py,sha256=2dP1iusI9pE_GrPlz3VXEdz51R5JlNo9_-OXbe6vn7I,6716
|
|
24
|
+
arcade_core/usage/usage_service.py,sha256=xzWWSEktm58liiNYugBHRactSru8V5foriHcsoH0j1A,3407
|
|
25
|
+
arcade_core/usage/utils.py,sha256=FqBOmlhwT68cbnpI5Vx9ZW6vLRYPVg4FJ0GaMEp8qEM,398
|
|
26
|
+
arcade_core-3.1.0.dist-info/METADATA,sha256=KcL3pN8hxkmsFCcTcqqNMrQ_kDGI1EcrQ3Rr6Hk35Xw,2383
|
|
27
|
+
arcade_core-3.1.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
|
|
28
|
+
arcade_core-3.1.0.dist-info/RECORD,,
|
|
File without changes
|