lr-gladiator 0.12.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of lr-gladiator might be problematic. Click here for more details.
- gladiator/__init__.py +7 -0
- gladiator/arena.py +1119 -0
- gladiator/checksums.py +31 -0
- gladiator/cli.py +433 -0
- gladiator/config.py +60 -0
- lr_gladiator-0.12.0.dist-info/METADATA +198 -0
- lr_gladiator-0.12.0.dist-info/RECORD +11 -0
- lr_gladiator-0.12.0.dist-info/WHEEL +5 -0
- lr_gladiator-0.12.0.dist-info/entry_points.txt +2 -0
- lr_gladiator-0.12.0.dist-info/licenses/LICENSE +25 -0
- lr_gladiator-0.12.0.dist-info/top_level.txt +1 -0
gladiator/arena.py
ADDED
|
@@ -0,0 +1,1119 @@
|
|
|
1
|
+
#! /usr/bin/env python
|
|
2
|
+
# -*- coding: utf-8 -*-
|
|
3
|
+
# src/gladiator/arena.py
|
|
4
|
+
from __future__ import annotations
|
|
5
|
+
import subprocess
|
|
6
|
+
import shlex
|
|
7
|
+
import os
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
from typing import Dict, List, Optional, Tuple
|
|
10
|
+
import requests
|
|
11
|
+
from .config import LoginConfig
|
|
12
|
+
from .checksums import sha256_file
|
|
13
|
+
import hashlib
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class ArenaError(RuntimeError):
    """Raised for any Arena API failure surfaced by ArenaClient."""

    pass
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class ArenaClient:
|
|
21
|
+
def __init__(self, cfg: LoginConfig):
|
|
22
|
+
self.cfg = cfg
|
|
23
|
+
self.session = requests.Session()
|
|
24
|
+
self.session.verify = cfg.verify_tls
|
|
25
|
+
# Default headers: explicitly request/submit JSON
|
|
26
|
+
self.session.headers.update(
|
|
27
|
+
{
|
|
28
|
+
"Accept": "application/json",
|
|
29
|
+
"Content-Type": "application/json",
|
|
30
|
+
"User-Agent": "gladiator-arena/0.1",
|
|
31
|
+
"Arena-Usage-Reason": cfg.reason or "gladiator/cli",
|
|
32
|
+
}
|
|
33
|
+
)
|
|
34
|
+
if cfg.arena_session_id:
|
|
35
|
+
self.session.headers.update({"arena_session_id": cfg.arena_session_id})
|
|
36
|
+
|
|
37
|
+
self._debug = bool(int(os.environ.get("GLADIATOR_DEBUG", "0")))
|
|
38
|
+
|
|
39
|
+
# ---------- Utilities ----------
|
|
40
|
+
def _ensure_json(self, resp: requests.Response):
|
|
41
|
+
ctype = resp.headers.get("Content-Type", "").lower()
|
|
42
|
+
if "application/json" not in ctype:
|
|
43
|
+
snippet = resp.text[:400].replace("", " ")
|
|
44
|
+
raise ArenaError(
|
|
45
|
+
f"Expected JSON but got '{ctype or 'unknown'}' from {resp.url}. "
|
|
46
|
+
f"Status {resp.status_code}. Body starts with: {snippet}"
|
|
47
|
+
)
|
|
48
|
+
try:
|
|
49
|
+
return resp.json()
|
|
50
|
+
except Exception as e:
|
|
51
|
+
raise ArenaError(f"Failed to parse JSON from {resp.url}: {e}") from e
|
|
52
|
+
|
|
53
|
+
def _log(self, msg: str):
|
|
54
|
+
if self._debug:
|
|
55
|
+
print(f"[gladiator debug] {msg}")
|
|
56
|
+
|
|
57
|
+
def _try_json(self, resp: requests.Response) -> Optional[dict]:
|
|
58
|
+
"""Best-effort JSON parse. Returns None if not JSON or parse fails."""
|
|
59
|
+
ctype = resp.headers.get("Content-Type", "").lower()
|
|
60
|
+
if "application/json" not in ctype:
|
|
61
|
+
return None
|
|
62
|
+
try:
|
|
63
|
+
data = resp.json()
|
|
64
|
+
return data if isinstance(data, dict) else {"data": data}
|
|
65
|
+
except Exception:
|
|
66
|
+
return None
|
|
67
|
+
|
|
68
|
+
# --- version picking helpers ---
|
|
69
|
+
@staticmethod
|
|
70
|
+
def _logical_key(f: Dict) -> str:
|
|
71
|
+
# Prefer any group-level id; fall back to normalized filename
|
|
72
|
+
return (
|
|
73
|
+
f.get("attachmentGroupGuid")
|
|
74
|
+
or f.get("attachmentGroupId")
|
|
75
|
+
or f.get("attachmentGuid")
|
|
76
|
+
or (f.get("name") or f.get("filename") or "").lower()
|
|
77
|
+
)
|
|
78
|
+
|
|
79
|
+
@staticmethod
|
|
80
|
+
def _version_of(f: Dict) -> int:
|
|
81
|
+
for k in ("version", "fileVersion", "versionNumber", "rev", "revision"):
|
|
82
|
+
v = f.get(k)
|
|
83
|
+
if v is None:
|
|
84
|
+
continue
|
|
85
|
+
try:
|
|
86
|
+
return int(v)
|
|
87
|
+
except Exception:
|
|
88
|
+
if isinstance(v, str) and len(v) == 1 and v.isalpha():
|
|
89
|
+
return ord(v.upper()) - 64 # A->1
|
|
90
|
+
return -1
|
|
91
|
+
|
|
92
|
+
@staticmethod
|
|
93
|
+
def _timestamp_of(f: Dict):
|
|
94
|
+
from datetime import datetime
|
|
95
|
+
from email.utils import parsedate_to_datetime
|
|
96
|
+
|
|
97
|
+
for k in (
|
|
98
|
+
"modifiedAt",
|
|
99
|
+
"updatedAt",
|
|
100
|
+
"lastModified",
|
|
101
|
+
"lastModifiedDate",
|
|
102
|
+
"effectiveDate",
|
|
103
|
+
"createdAt",
|
|
104
|
+
):
|
|
105
|
+
s = f.get(k)
|
|
106
|
+
if not s:
|
|
107
|
+
continue
|
|
108
|
+
try:
|
|
109
|
+
return datetime.fromisoformat(s.replace("Z", "+00:00"))
|
|
110
|
+
except Exception:
|
|
111
|
+
try:
|
|
112
|
+
return parsedate_to_datetime(s)
|
|
113
|
+
except Exception:
|
|
114
|
+
continue
|
|
115
|
+
return None
|
|
116
|
+
|
|
117
|
+
def _latest_files(self, files: List[Dict]) -> List[Dict]:
|
|
118
|
+
best: Dict[str, Dict] = {}
|
|
119
|
+
for f in files:
|
|
120
|
+
key = self._logical_key(f)
|
|
121
|
+
if not key:
|
|
122
|
+
continue
|
|
123
|
+
score = (self._version_of(f), self._timestamp_of(f) or 0)
|
|
124
|
+
prev = best.get(key)
|
|
125
|
+
if not prev:
|
|
126
|
+
f["_score"] = score
|
|
127
|
+
best[key] = f
|
|
128
|
+
continue
|
|
129
|
+
if score > prev.get("_score", (-1, 0)):
|
|
130
|
+
f["_score"] = score
|
|
131
|
+
best[key] = f
|
|
132
|
+
out = []
|
|
133
|
+
for v in best.values():
|
|
134
|
+
v.pop("_score", None)
|
|
135
|
+
out.append(v)
|
|
136
|
+
return out
|
|
137
|
+
|
|
138
|
+
# ---------- Public high-level methods ----------
|
|
139
|
+
def get_latest_approved_revision(self, item_number: str) -> str:
|
|
140
|
+
return self._api_get_latest_approved(item_number)
|
|
141
|
+
|
|
142
|
+
def list_files(
|
|
143
|
+
self, item_number: str, revision: Optional[str] = None
|
|
144
|
+
) -> List[Dict]:
|
|
145
|
+
target_guid = self._api_resolve_revision_guid(
|
|
146
|
+
item_number, revision or "EFFECTIVE"
|
|
147
|
+
)
|
|
148
|
+
raw = self._api_list_files_by_item_guid(target_guid)
|
|
149
|
+
return self._latest_files(raw)
|
|
150
|
+
|
|
151
|
+
    def download_files(
        self,
        item_number: str,
        revision: Optional[str] = None,
        out_dir: Path = Path("."),
    ) -> List[Path]:
        """Download every file attached to *item_number* into *out_dir*.

        Returns the list of paths actually written. Missing/forbidden blobs
        and HTTP errors are logged (when debug is on) and skipped rather than
        aborting the whole run.
        """
        files = self.list_files(item_number, revision)
        out_dir.mkdir(parents=True, exist_ok=True)
        downloaded: List[Path] = []
        for f in files:
            # Skip associations with no blob
            if not f.get("haveContent", True):
                self._log(
                    f"Skip {item_number}: file {f.get('filename')} has no content"
                )
                continue

            url = f.get("downloadUrl") or f.get("url")
            filename = f.get("filename") or f.get("name")
            if not url or not filename:
                # Nothing to fetch, or nowhere to write it; skip silently.
                continue

            p = out_dir / filename
            try:
                with self.session.get(
                    url,
                    stream=True,
                    headers={"arena_session_id": self.cfg.arena_session_id or ""},
                ) as r:
                    # If the blob is missing/forbidden, don’t abort the whole command
                    if r.status_code in (400, 403, 404):
                        self._log(
                            f"Skip {item_number}: (unknown) content unavailable "
                            f"(HTTP {r.status_code})"
                        )
                        continue
                    r.raise_for_status()
                    # Stream to disk in 128 KiB chunks.
                    with open(p, "wb") as fh:
                        for chunk in r.iter_content(128 * 1024):
                            fh.write(chunk)
                    downloaded.append(p)
            except requests.HTTPError as e:
                # Be resilient: log and continue
                self._log(f"Download failed for (unknown): {e}")
                continue
        return downloaded
|
|
197
|
+
|
|
198
|
+
def download_files_recursive(
|
|
199
|
+
self,
|
|
200
|
+
item_number: str,
|
|
201
|
+
revision: Optional[str] = None,
|
|
202
|
+
out_dir: Path = Path("."),
|
|
203
|
+
*,
|
|
204
|
+
max_depth: Optional[int] = None,
|
|
205
|
+
) -> List[Path]:
|
|
206
|
+
"""
|
|
207
|
+
Download files for `item_number` AND, recursively, for all subassemblies
|
|
208
|
+
discovered via the BOM. Each child item is placed under a subdirectory:
|
|
209
|
+
<out_dir>/<child_item_number>/
|
|
210
|
+
Root files go directly in <out_dir>/.
|
|
211
|
+
|
|
212
|
+
Depth semantics match `get_bom(..., recursive=True, max_depth=...)`.
|
|
213
|
+
"""
|
|
214
|
+
# Ensure the root directory exists
|
|
215
|
+
out_dir.mkdir(parents=True, exist_ok=True)
|
|
216
|
+
|
|
217
|
+
downloaded: List[Path] = []
|
|
218
|
+
# 1) Download files for the root item into out_dir
|
|
219
|
+
downloaded.extend(self.download_files(item_number, revision, out_dir=out_dir))
|
|
220
|
+
|
|
221
|
+
# 2) Expand BOM recursively and collect unique child item numbers
|
|
222
|
+
lines = self.get_bom(
|
|
223
|
+
item_number,
|
|
224
|
+
revision,
|
|
225
|
+
recursive=True,
|
|
226
|
+
max_depth=max_depth,
|
|
227
|
+
)
|
|
228
|
+
seen_children = set()
|
|
229
|
+
for ln in lines:
|
|
230
|
+
child_num = (ln or {}).get("itemNumber")
|
|
231
|
+
if not child_num or child_num == item_number:
|
|
232
|
+
continue
|
|
233
|
+
if child_num in seen_children:
|
|
234
|
+
continue
|
|
235
|
+
seen_children.add(child_num)
|
|
236
|
+
|
|
237
|
+
# Place each child's files under <out_dir>/<child_num>/
|
|
238
|
+
child_dir = out_dir / child_num
|
|
239
|
+
downloaded.extend(
|
|
240
|
+
self.download_files(child_num, revision, out_dir=child_dir)
|
|
241
|
+
)
|
|
242
|
+
|
|
243
|
+
return downloaded
|
|
244
|
+
|
|
245
|
+
def upload_file_to_working(
|
|
246
|
+
self,
|
|
247
|
+
item_number: str,
|
|
248
|
+
file_path: Path,
|
|
249
|
+
reference: Optional[str] = None,
|
|
250
|
+
*,
|
|
251
|
+
title: Optional[str] = None,
|
|
252
|
+
category_name: str = "CAD Data",
|
|
253
|
+
file_format: Optional[str] = None,
|
|
254
|
+
description: Optional[str] = None,
|
|
255
|
+
primary: bool = True,
|
|
256
|
+
latest_edition_association: bool = True,
|
|
257
|
+
edition: str = None,
|
|
258
|
+
) -> Dict:
|
|
259
|
+
"""
|
|
260
|
+
Update-if-exists-else-create semantics:
|
|
261
|
+
1) Resolve EFFECTIVE GUID from item number
|
|
262
|
+
2) Resolve WORKING revision GUID (fail if none)
|
|
263
|
+
3) Find existing file by title orexact filename (WORKING first, then EFFECTIVE)
|
|
264
|
+
- If found: POST /files/{fileGuid}/content (multipart)
|
|
265
|
+
- Else: POST /items/{workingGuid}/files (multipart) with file.edition
|
|
266
|
+
"""
|
|
267
|
+
return self._api_upload_or_update_file(
|
|
268
|
+
item_number=item_number,
|
|
269
|
+
file_path=file_path,
|
|
270
|
+
reference=reference,
|
|
271
|
+
title=title,
|
|
272
|
+
category_name=category_name,
|
|
273
|
+
file_format=file_format,
|
|
274
|
+
description=description,
|
|
275
|
+
primary=primary,
|
|
276
|
+
latest_edition_association=latest_edition_association,
|
|
277
|
+
edition=edition,
|
|
278
|
+
)
|
|
279
|
+
|
|
280
|
+
def get_bom(
|
|
281
|
+
self,
|
|
282
|
+
item_number: str,
|
|
283
|
+
revision: Optional[str] = None,
|
|
284
|
+
*,
|
|
285
|
+
recursive: bool = False,
|
|
286
|
+
max_depth: Optional[int] = None,
|
|
287
|
+
) -> List[Dict]:
|
|
288
|
+
"""
|
|
289
|
+
Return a normalized list of BOM lines for the given item.
|
|
290
|
+
|
|
291
|
+
By default this fetches the EFFECTIVE (approved) revision's BOM.
|
|
292
|
+
Use revision="WORKING" or a specific label (e.g., "B2") to override.
|
|
293
|
+
|
|
294
|
+
If recursive=True, expand subassemblies depth-first. max_depth limits the recursion
|
|
295
|
+
depth (1 = only direct children). If omitted, recursion is unlimited.
|
|
296
|
+
"""
|
|
297
|
+
selector = (revision or "EFFECTIVE").strip()
|
|
298
|
+
out: List[Dict] = []
|
|
299
|
+
self._bom_expand(
|
|
300
|
+
root_item=item_number,
|
|
301
|
+
selector=selector,
|
|
302
|
+
out=out,
|
|
303
|
+
recursive=recursive,
|
|
304
|
+
max_depth=max_depth,
|
|
305
|
+
_level=0,
|
|
306
|
+
_seen=set(),
|
|
307
|
+
)
|
|
308
|
+
return out
|
|
309
|
+
|
|
310
|
+
# === Internal: single fetch + normalization (your original logic) ===
|
|
311
|
+
|
|
312
|
+
    def _fetch_bom_normalized(self, item_number: str, selector: str) -> List[Dict]:
        """
        Fetch and normalize the BOM for item_number with the given revision selector.
        Falls back WORKING -> EFFECTIVE if selector is WORKING and no WORKING exists.
        """
        # 1) Resolve the exact revision GUID we want the BOM for
        try:
            target_guid = self._api_resolve_revision_guid(item_number, selector)
        except ArenaError:
            if selector.strip().upper() == "WORKING":
                # fallback: try EFFECTIVE for children that don't have a WORKING revision
                target_guid = self._api_resolve_revision_guid(item_number, "EFFECTIVE")
            else:
                raise

        # 2) GET /items/{guid}/bom
        url = f"{self._api_base()}/items/{target_guid}/bom"
        self._log(f"GET {url}")
        r = self.session.get(url)
        r.raise_for_status()
        data = self._ensure_json(r)

        # NOTE(review): the default assumes `data` is a dict; a bare-list
        # payload would fail on .get() — presumably the API always wraps
        # results in a dict here. Verify against a live tenant.
        rows = data.get("results", data if isinstance(data, list) else [])
        norm: List[Dict] = []
        for row in rows:
            # Each BOM line wraps the child item record under "item".
            itm = row.get("item", {}) if isinstance(row, dict) else {}
            norm.append(
                {
                    # association/line
                    "guid": row.get("guid"),
                    "lineNumber": row.get("lineNumber"),
                    "notes": row.get("notes"),
                    "quantity": row.get("quantity"),
                    "refDes": row.get("refDes")
                    or row.get("referenceDesignators")
                    or "",
                    # child item
                    "itemGuid": itm.get("guid") or itm.get("id"),
                    "itemNumber": itm.get("number"),
                    "itemName": itm.get("name"),
                    "itemRevision": itm.get("revisionNumber"),
                    "itemRevisionStatus": itm.get("revisionStatus"),
                    "itemUrl": (itm.get("url") or {}).get("api"),
                    "itemAppUrl": (itm.get("url") or {}).get("app"),
                }
            )
        return norm
|
|
359
|
+
|
|
360
|
+
# === Internal: recursive expansion ===
|
|
361
|
+
|
|
362
|
+
def _bom_expand(
|
|
363
|
+
self,
|
|
364
|
+
*,
|
|
365
|
+
root_item: str,
|
|
366
|
+
selector: str,
|
|
367
|
+
out: List[Dict],
|
|
368
|
+
recursive: bool,
|
|
369
|
+
max_depth: Optional[int],
|
|
370
|
+
_level: int,
|
|
371
|
+
_seen: set,
|
|
372
|
+
) -> None:
|
|
373
|
+
# avoid cycles
|
|
374
|
+
if root_item in _seen:
|
|
375
|
+
return
|
|
376
|
+
_seen.add(root_item)
|
|
377
|
+
|
|
378
|
+
rows = self._fetch_bom_normalized(root_item, selector)
|
|
379
|
+
|
|
380
|
+
# attach level and parentNumber (useful in JSON + for debugging)
|
|
381
|
+
for r in rows:
|
|
382
|
+
r["level"] = _level
|
|
383
|
+
r["parentNumber"] = root_item
|
|
384
|
+
out.append(r)
|
|
385
|
+
|
|
386
|
+
if not recursive:
|
|
387
|
+
return
|
|
388
|
+
|
|
389
|
+
# depth check: if max_depth=1, only expand children once (level 0 -> level 1)
|
|
390
|
+
if max_depth is not None and _level >= max_depth:
|
|
391
|
+
return
|
|
392
|
+
|
|
393
|
+
# expand each child that looks like an assembly (if it has a BOM; empty BOM is okay)
|
|
394
|
+
for r in rows:
|
|
395
|
+
child_num = r.get("itemNumber")
|
|
396
|
+
if not child_num:
|
|
397
|
+
continue
|
|
398
|
+
try:
|
|
399
|
+
# Recurse; keep same selector, with WORKING->EFFECTIVE fallback handled in _fetch_bom_normalized
|
|
400
|
+
self._bom_expand(
|
|
401
|
+
root_item=child_num,
|
|
402
|
+
selector=selector,
|
|
403
|
+
out=out,
|
|
404
|
+
recursive=True,
|
|
405
|
+
max_depth=max_depth,
|
|
406
|
+
_level=_level + 1,
|
|
407
|
+
_seen=_seen,
|
|
408
|
+
)
|
|
409
|
+
except ArenaError:
|
|
410
|
+
# Child might not have a BOM; skip silently
|
|
411
|
+
continue
|
|
412
|
+
|
|
413
|
+
def _api_base(self) -> str:
|
|
414
|
+
return self.cfg.base_url.rstrip("/")
|
|
415
|
+
|
|
416
|
+
    def _api_get_latest_approved(self, item_number: str) -> str:
        """Return the revision label (e.g. "B3") of the currently effective revision.

        Raises ArenaError when the item does not exist, has no effective
        revision, or the payload carries no usable revision label.
        """
        item_guid = self._api_resolve_item_guid(item_number)
        url = f"{self._api_base()}/items/{item_guid}/revisions"
        self._log(f"GET {url}")
        r = self.session.get(url)
        if r.status_code == 404:
            raise ArenaError(f"Item {item_number} not found")
        r.raise_for_status()
        data = self._ensure_json(r)
        revs = data.get("results", data if isinstance(data, list) else [])
        if not isinstance(revs, list):
            raise ArenaError(f"Unexpected revisions payload for item {item_number}")

        # Arena marks the currently effective (approved) revision as:
        # - revisionStatus == "EFFECTIVE" (string)
        # - OR status == 1 (numeric)
        effective = [
            rv
            for rv in revs
            if (str(rv.get("revisionStatus") or "").upper() == "EFFECTIVE")
            or (rv.get("status") == 1)
        ]
        if not effective:
            raise ArenaError(f"No approved/released revisions for item {item_number}")

        # Prefer the one that is not superseded; otherwise fall back to the most recently superseded.
        current = next(
            (rv for rv in effective if not rv.get("supersededDateTime")), None
        )
        if not current:
            # sort by supersededDateTime (None last) then by number/name as a stable tie-breaker
            def _sd(rv):
                # Missing timestamps sort first so the latest superseded wins.
                dt = rv.get("supersededDateTime")
                return dt or "0000-00-00T00:00:00Z"

            effective.sort(key=_sd)
            current = effective[-1]

        # The human-visible revision is under "number" (e.g., "B3"); fall back defensively.
        rev_label = (
            current.get("number") or current.get("name") or current.get("revision")
        )
        if not rev_label:
            raise ArenaError(
                f"Could not determine revision label for item {item_number}"
            )
        return rev_label
|
|
463
|
+
|
|
464
|
+
    def _api_list_files(self, item_number: str) -> List[Dict]:
        """List file associations for the item's base (effective) GUID.

        Each association row is flattened into the dict shape the rest of the
        client relies on; ``downloadUrl`` is synthesized from the file GUID.
        """
        item_guid = self._api_resolve_item_guid(item_number)
        url = f"{self._api_base()}/items/{item_guid}/files"
        self._log(f"GET {url}")
        r = self.session.get(url)
        r.raise_for_status()
        data = self._ensure_json(r)
        rows = data.get("results", data if isinstance(data, list) else [])
        norm: List[Dict] = []
        for row in rows:
            # Each association row wraps the actual file record under "file".
            f = row.get("file", {}) if isinstance(row, dict) else {}
            file_guid = f.get("guid") or f.get("id")
            norm.append(
                {
                    "id": row.get("guid") or row.get("id"),
                    "fileGuid": file_guid,
                    "name": f.get("name") or f.get("title"),
                    "title": f.get("title"),
                    "filename": f.get("name") or f.get("title"),
                    "size": f.get("size"),
                    "haveContent": f.get("haveContent", True),
                    # Direct content endpoint; None when the row has no file GUID.
                    "downloadUrl": (
                        f"{self._api_base()}/files/{file_guid}/content"
                        if file_guid
                        else None
                    ),
                    "edition": f.get("edition"),
                    "updatedAt": f.get("lastModifiedDateTime")
                    or f.get("lastModifiedDate")
                    or f.get("creationDateTime"),
                    "attachmentGroupGuid": row.get("guid"),
                }
            )
        return norm
|
|
498
|
+
|
|
499
|
+
    def _api_resolve_revision_guid(self, item_number: str, selector: str | None) -> str:
        """Return the item GUID for the requested revision selector.

        *selector* may be None (defaults to EFFECTIVE), a named selector
        ("WORKING", "EFFECTIVE", "APPROVED", "RELEASED"), or a specific
        revision label such as "A" or "B2". Raises ArenaError when no
        matching revision exists.
        """
        # Resolve base item (effective) guid from number
        effective_guid = self._api_resolve_item_guid(item_number)

        # If no selector, we default to EFFECTIVE
        sel = (selector or "EFFECTIVE").strip().upper()

        # Fetch revisions
        url = f"{self._api_base()}/items/{effective_guid}/revisions"
        self._log(f"GET {url}")
        r = self.session.get(url)
        r.raise_for_status()
        data = self._ensure_json(r)
        revs = data.get("results", data if isinstance(data, list) else [])

        def pick(pred):
            # First revision satisfying pred, or None.
            for rv in revs:
                if pred(rv):
                    return rv.get("guid")
            return None

        # Named selectors
        if sel in {"WORKING"}:
            # WORKING is flagged by revisionStatus string or numeric status 0.
            guid = pick(
                lambda rv: (rv.get("revisionStatus") or "").upper() == "WORKING"
                or rv.get("status") == 0
            )
            if not guid:
                raise ArenaError("No WORKING revision exists for this item.")
            return guid

        if sel in {"EFFECTIVE", "APPROVED", "RELEASED"}:
            # Prefer the one not superseded
            eff = [
                rv
                for rv in revs
                if (rv.get("revisionStatus") or "").upper() == "EFFECTIVE"
                or rv.get("status") == 1
            ]
            if not eff:
                raise ArenaError(
                    "No approved/effective revision exists for this item. Try using revision 'WORKING'."
                )
            current = next(
                (rv for rv in eff if not rv.get("supersededDateTime")), eff[-1]
            )
            return current.get("guid")

        # Specific label (e.g., "A", "B2")
        guid = pick(
            lambda rv: (rv.get("number") or rv.get("name"))
            and str(rv.get("number") or rv.get("name")).upper() == sel
        )
        if not guid:
            raise ArenaError(f'Revision "{selector}" not found for item {item_number}.')
        return guid
|
|
556
|
+
|
|
557
|
+
    def _api_list_files_by_item_guid(self, item_guid: str) -> list[dict]:
        """List file associations for an exact item/revision GUID.

        Same flattening as _api_list_files(), plus storage metadata
        (``storageMethodName``, ``location``).
        """
        url = f"{self._api_base()}/items/{item_guid}/files"
        self._log(f"GET {url}")
        r = self.session.get(url)
        r.raise_for_status()
        data = self._ensure_json(r)
        rows = data.get("results", data if isinstance(data, list) else [])
        # … keep existing normalization from _api_list_files() …
        norm = []
        for row in rows:
            # Each association row wraps the actual file record under "file".
            f = row.get("file", {}) if isinstance(row, dict) else {}
            file_guid = f.get("guid") or f.get("id")
            norm.append(
                {
                    "id": row.get("guid") or row.get("id"),
                    "fileGuid": file_guid,
                    "title": f.get("title"),
                    "name": f.get("name"),
                    "filename": f.get("name"),
                    "size": f.get("size"),
                    "haveContent": f.get("haveContent", True),
                    # Direct content endpoint; None when the row has no file GUID.
                    "downloadUrl": (
                        f"{self._api_base()}/files/{file_guid}/content"
                        if file_guid
                        else None
                    ),
                    "edition": f.get("edition"),
                    "updatedAt": f.get("lastModifiedDateTime")
                    or f.get("lastModifiedDate")
                    or f.get("creationDateTime"),
                    "attachmentGroupGuid": row.get("guid"),
                    "storageMethodName": (
                        f.get("storageMethodName") or f.get("storageMethod")
                    ),
                    "location": f.get("location"),
                }
            )
        return norm
|
|
595
|
+
|
|
596
|
+
    def _api_upload_or_update_file(
        self,
        *,
        item_number: str,
        file_path: Path,
        reference: Optional[str],
        title: Optional[str],
        category_name: str,
        file_format: Optional[str],
        description: Optional[str],
        primary: bool,
        latest_edition_association: bool,
        edition: str,
    ) -> Dict:
        """Create or update a file attachment on the item's WORKING revision.

        Flow: resolve the EFFECTIVE item GUID, require a WORKING revision,
        look for an existing association (by title, then by exact filename on
        WORKING and EFFECTIVE). If found, POST new content to the existing
        file; otherwise create a new association on WORKING. In both paths a
        follow-up PUT sets the edition label (best-effort).

        Raises ArenaError when the file is missing, no WORKING revision
        exists, or the file category cannot be resolved.
        """
        if not file_path.exists() or not file_path.is_file():
            raise ArenaError(f"File not found: {file_path}")

        filename = file_path.name  # Use truncated SHA256 hash if no edition is provided
        if not edition:
            # Arena seems to only accept 16 characters of edition information.
            # The hex digest gives 16 hex × 4 bits = 64 bits of entropy.
            # Less than a million files, collision risk is practically zero (~1 / 10^8).
            edition = sha256_file(file_path)[:16]

        # 0) Resolve EFFECTIVE revision guid from item number
        effective_guid = self._api_resolve_item_guid(item_number)

        # 1) Resolve WORKING revision guid
        revs_url = f"{self._api_base()}/items/{effective_guid}/revisions"
        self._log(f"GET {revs_url}")
        r = self.session.get(revs_url)
        r.raise_for_status()
        data = self._ensure_json(r)
        rows = data.get("results", data if isinstance(data, list) else [])
        working_guid = None
        for rv in rows:
            # WORKING is flagged by revisionStatus string or numeric status 0.
            if (str(rv.get("revisionStatus") or "").upper() == "WORKING") or (
                rv.get("status") == 0
            ):
                working_guid = rv.get("guid")
                break
        if not working_guid:
            raise ArenaError(
                "No WORKING revision exists for this item. Create a working revision in Arena, then retry."
            )

        # Helper to list associations for a given item/revision guid
        def _list_assocs(item_guid: str) -> list:
            url = f"{self._api_base()}/items/{item_guid}/files"
            self._log(f"GET {url}")
            lr = self.session.get(url)
            lr.raise_for_status()
            payload = self._ensure_json(lr)
            return payload.get("results", payload if isinstance(payload, list) else [])

        # Try to find existing association by exact filename (WORKING first, then EFFECTIVE)
        filename = file_path.name  # NOTE(review): re-assigned; same value as above
        assoc = None
        if title:
            # Title-based match, only against the WORKING revision.
            candidates = _list_assocs(working_guid)

            def _a_title(a):
                # Case-insensitive, whitespace-trimmed title of an association.
                f = a.get("file") or {}
                return (f.get("title") or a.get("title") or "").strip().casefold()

            tnorm = title.strip().casefold()
            # Prefer primary + latestEditionAssociation if duplicates exist
            preferred = [
                a
                for a in candidates
                if _a_title(a) == tnorm
                and a.get("primary")
                and a.get("latestEditionAssociation")
            ]
            if preferred:
                assoc = preferred[0]
            else:
                any_match = [a for a in candidates if _a_title(a) == tnorm]
                if any_match:
                    assoc = any_match[0]

        # NOTE(review): this filename search runs even when a title match was
        # found above and can overwrite it — confirm that is intended.
        for guid in (working_guid, effective_guid):
            assocs = _list_assocs(guid)
            # prefer primary && latestEditionAssociation, then any by name
            prim_latest = [
                a
                for a in assocs
                if a.get("primary")
                and a.get("latestEditionAssociation")
                and ((a.get("file") or {}).get("name") == filename)
            ]
            if prim_latest:
                assoc = prim_latest[0]
                break
            any_by_name = [
                a for a in assocs if (a.get("file") or {}).get("name") == filename
            ]
            if any_by_name:
                assoc = any_by_name[0]
                break

        # If an existing file is found: update its content (new edition)
        if assoc:
            file_guid = (assoc.get("file") or {}).get("guid")
            if not file_guid:
                raise ArenaError("Existing association found but no file.guid present.")
            post_url = f"{self._api_base()}/files/{file_guid}/content"
            self._log(f"POST {post_url} (multipart content update)")
            with open(file_path, "rb") as fp:
                files = {"content": (filename, fp, "application/octet-stream")}
                # Drop the pinned JSON Content-Type so requests can set the
                # multipart boundary; restore it afterwards.
                existing_ct = self.session.headers.pop("Content-Type", None)
                try:
                    ur = self.session.post(post_url, files=files)
                finally:
                    if existing_ct is not None:
                        self.session.headers["Content-Type"] = existing_ct
            ur.raise_for_status()

            # Update the edition label on the File itself
            try:
                put_url = f"{self._api_base()}/files/{file_guid}"
                self._log(f"PUT {put_url} (set edition={edition})")
                pr = self.session.put(put_url, json={"edition": str(edition)})
                pr.raise_for_status()
            except requests.HTTPError as e:
                # Don't fail the whole operation if the label update is rejected
                self._log(f"Edition update failed for {file_guid}: {e}")

            # Many tenants return 201 with no JSON for content updates. Be flexible.
            data = self._try_json(ur)
            if data is None:
                # Synthesize a small success payload with whatever we can glean.
                return {
                    "ok": True,
                    "status": ur.status_code,
                    "fileGuid": file_guid,
                    "location": ur.headers.get("Location"),
                    "edition": str(edition),
                }
            return data

        # Else: create a new association on WORKING
        # 2) Resolve file category guid by name (default: CAD Data)
        cats_url = f"{self._api_base()}/settings/files/categories"
        self._log(f"GET {cats_url}")
        r = self.session.get(cats_url)
        r.raise_for_status()
        cats = self._ensure_json(r).get("results", [])
        cat_guid = None
        for c in cats:
            # Accept top-level categories or those under "Internal File".
            if c.get("name") == category_name and (c.get("parentCategory") or {}).get(
                "name"
            ) in {"Internal File", None}:
                cat_guid = c.get("guid")
                break
        if not cat_guid:
            raise ArenaError(
                f'File category "{category_name}" not found or not allowed.'
            )

        # 3) Prepare multipart (create association)
        title = title or file_path.name
        file_format = file_format or (
            file_path.suffix[1:].lower() if file_path.suffix else "bin"
        )
        description = description or "Uploaded via gladiator"
        # NOTE(review): this `files` dict is never used — it is rebuilt below
        # inside the with-block — and the open() here leaks a file handle.
        # Candidate for removal.
        files = {
            "content": (
                file_path.name,
                open(file_path, "rb"),
                "application/octet-stream",
            )
        }

        # NOTE: nested field names are sent in `data`, not `files`
        data_form = {
            "file.title": title,
            "file.description": description,
            "file.category.guid": cat_guid,
            "file.format": file_format,
            "file.edition": str(edition),
            "file.storageMethodName": "FILE",
            "file.private": "false",
            "primary": "true" if primary else "false",
            "latestEditionAssociation": (
                "true" if latest_edition_association else "false"
            ),
        }
        if reference:
            data_form["reference"] = reference

        # 4) POST to /items/{workingGuid}/files (multipart). Ensure Content-Type not pinned.
        post_url = f"{self._api_base()}/items/{working_guid}/files"
        self._log(f"POST {post_url} (multipart)")

        with open(file_path, "rb") as fp:
            files = {"content": (filename, fp, "application/octet-stream")}
            existing_ct = self.session.headers.pop("Content-Type", None)
            try:
                cr = self.session.post(post_url, data=data_form, files=files)
            finally:
                if existing_ct is not None:
                    self.session.headers["Content-Type"] = existing_ct
        cr.raise_for_status()
        resp = self._ensure_json(cr)

        # Normalize common fields we use elsewhere
        row = resp if isinstance(resp, dict) else {}
        f = row.get("file", {})

        # Ensure the edition label is exactly what we asked for (some tenants ignore form edition)
        try:
            file_guid_created = (f or {}).get("guid")
            if file_guid_created and str(edition):
                put_url = f"{self._api_base()}/files/{file_guid_created}"
                self._log(f"PUT {put_url} (set edition={edition})")
                pr = self.session.put(put_url, json={"edition": str(edition)})
                pr.raise_for_status()
                # Update local 'f' edition if the PUT succeeded
                f["edition"] = str(edition)
        except requests.HTTPError as e:
            self._log(
                f"Edition update after create failed for {file_guid_created}: {e}"
            )

        return {
            "associationGuid": row.get("guid"),
            "primary": row.get("primary"),
            "latestEditionAssociation": row.get("latestEditionAssociation"),
            "file": {
                "guid": f.get("guid"),
                "title": f.get("title"),
                "name": f.get("name"),
                "size": f.get("size"),
                "format": f.get("format"),
                "category": (f.get("category") or {}).get("name"),
                "edition": f.get("edition") or str(edition),
                "lastModifiedDateTime": f.get("lastModifiedDateTime"),
            },
            "downloadUrl": (
                f"{self._api_base()}/files/{(f or {}).get('guid')}/content"
                if f.get("guid")
                else None
            ),
        }
|
|
841
|
+
|
|
842
|
+
def _api_resolve_item_guid(self, item_number: str) -> str:
|
|
843
|
+
url = f"{self._api_base()}/items/"
|
|
844
|
+
params = {"number": item_number, "limit": 1, "responseview": "minimal"}
|
|
845
|
+
self._log(f"GET {url} params={params}")
|
|
846
|
+
r = self.session.get(url, params=params)
|
|
847
|
+
r.raise_for_status()
|
|
848
|
+
data = self._ensure_json(r)
|
|
849
|
+
results = data.get("results") if isinstance(data, dict) else data
|
|
850
|
+
if not results:
|
|
851
|
+
raise ArenaError(f"Item number {item_number} not found")
|
|
852
|
+
guid = (
|
|
853
|
+
results[0].get("guid") or results[0].get("id") or results[0].get("itemId")
|
|
854
|
+
)
|
|
855
|
+
if not guid:
|
|
856
|
+
raise ArenaError("API response missing item GUID")
|
|
857
|
+
return guid
|
|
858
|
+
|
|
859
|
+
# --- helper: resolve File Category GUID by name (exact match under Settings) ---
|
|
860
|
+
def _api_resolve_file_category_guid(self, category_name: str) -> str:
|
|
861
|
+
cats_url = f"{self._api_base()}/settings/files/categories"
|
|
862
|
+
self._log(f"GET {cats_url}")
|
|
863
|
+
r = self.session.get(cats_url)
|
|
864
|
+
r.raise_for_status()
|
|
865
|
+
cats = self._ensure_json(r).get("results", [])
|
|
866
|
+
for c in cats:
|
|
867
|
+
if c.get("name") == category_name:
|
|
868
|
+
return c.get("guid")
|
|
869
|
+
raise ArenaError(f'File category "{category_name}" not found.')
|
|
870
|
+
|
|
871
|
+
# --- helper: create a WEB File (no binary content) and return its GUID ---
|
|
872
|
+
def _api_create_web_file(
|
|
873
|
+
self,
|
|
874
|
+
*,
|
|
875
|
+
category_guid: str,
|
|
876
|
+
title: str,
|
|
877
|
+
location_url: str,
|
|
878
|
+
edition: str,
|
|
879
|
+
description: Optional[str],
|
|
880
|
+
file_format: Optional[str],
|
|
881
|
+
private: bool = False,
|
|
882
|
+
) -> dict:
|
|
883
|
+
"""
|
|
884
|
+
POST /files (create File record with storageMethodName=WEB and a 'location')
|
|
885
|
+
"""
|
|
886
|
+
url = f"{self._api_base()}/files"
|
|
887
|
+
payload = {
|
|
888
|
+
"category": {"guid": category_guid},
|
|
889
|
+
"title": title,
|
|
890
|
+
"description": description or "",
|
|
891
|
+
"edition": str(edition),
|
|
892
|
+
"format": file_format or "url",
|
|
893
|
+
"private": bool(private),
|
|
894
|
+
"storageMethodName": "WEB",
|
|
895
|
+
"location": location_url,
|
|
896
|
+
}
|
|
897
|
+
self._log(f"POST {url} (create web file)")
|
|
898
|
+
r = self.session.post(url, json=payload)
|
|
899
|
+
r.raise_for_status()
|
|
900
|
+
data = self._ensure_json(r)
|
|
901
|
+
if not isinstance(data, dict) or not data.get("guid"):
|
|
902
|
+
raise ArenaError("File create (WEB) returned no GUID")
|
|
903
|
+
return data # includes "guid", "number", etc.
|
|
904
|
+
|
|
905
|
+
# --- helper: PUT File (update WEB metadata/location/edition) ---
|
|
906
|
+
def _api_update_web_file(
|
|
907
|
+
self,
|
|
908
|
+
*,
|
|
909
|
+
file_guid: str,
|
|
910
|
+
category_guid: str,
|
|
911
|
+
title: str,
|
|
912
|
+
location_url: str,
|
|
913
|
+
edition: str,
|
|
914
|
+
description: Optional[str],
|
|
915
|
+
file_format: Optional[str],
|
|
916
|
+
private: bool = False,
|
|
917
|
+
) -> dict:
|
|
918
|
+
"""
|
|
919
|
+
PUT /files/{guid} (update summary). For WEB/FTP/PLACE_HOLDER, include 'location'.
|
|
920
|
+
"""
|
|
921
|
+
url = f"{self._api_base()}/files/{file_guid}"
|
|
922
|
+
payload = {
|
|
923
|
+
"category": {"guid": category_guid},
|
|
924
|
+
"title": title,
|
|
925
|
+
"description": description or "",
|
|
926
|
+
"edition": str(edition),
|
|
927
|
+
"format": file_format or "url",
|
|
928
|
+
"private": bool(private),
|
|
929
|
+
"storageMethodName": "WEB",
|
|
930
|
+
"location": location_url,
|
|
931
|
+
}
|
|
932
|
+
self._log(f"PUT {url} (update web file)")
|
|
933
|
+
r = self.session.put(url, json=payload)
|
|
934
|
+
r.raise_for_status()
|
|
935
|
+
return self._ensure_json(r)
|
|
936
|
+
|
|
937
|
+
def _api_item_add_existing_file(
|
|
938
|
+
self,
|
|
939
|
+
*,
|
|
940
|
+
item_guid: str,
|
|
941
|
+
file_guid: str,
|
|
942
|
+
primary: bool,
|
|
943
|
+
latest_edition_association: bool,
|
|
944
|
+
reference: Optional[str] = None,
|
|
945
|
+
) -> dict:
|
|
946
|
+
url = f"{self._api_base()}/items/{item_guid}/files"
|
|
947
|
+
payload = {
|
|
948
|
+
"primary": bool(primary),
|
|
949
|
+
"latestEditionAssociation": bool(latest_edition_association),
|
|
950
|
+
"file": {"guid": file_guid},
|
|
951
|
+
}
|
|
952
|
+
if reference:
|
|
953
|
+
payload["reference"] = reference
|
|
954
|
+
r = self.session.post(url, json=payload)
|
|
955
|
+
r.raise_for_status()
|
|
956
|
+
return self._ensure_json(r)
|
|
957
|
+
|
|
958
|
+
def upload_weblink_to_working(
    self,
    *,
    item_number: str,
    url: str,
    reference: Optional[str] = None,  # (unused by "add existing"; kept for parity)
    title: str,
    category_name: str = "Web Link",
    file_format: Optional[str] = "url",
    description: Optional[str] = None,
    primary: bool = True,
    latest_edition_association: bool = True,
    edition: Optional[str] = None,
) -> Dict:
    """
    Idempotent "upsert" of a WEB-link File on the WORKING revision of `item_number`.

    Match rules (WORKING first, then EFFECTIVE):
      - any association whose File has storageMethodName in {"WEB","FTP"} AND
        (File.title == title OR File.location == url)

    If found -> PUT /files/{fileGuid} with storageMethodName=WEB + location + edition.
    Else -> POST /files (create) + POST /items/{workingGuid}/files (add existing).

    Returns a dict with "ok", "action" ("updated" or "created"), the
    association flags, and a normalized "file" sub-dict.

    Raises:
        ArenaError: when the item has no WORKING revision, the file
            category cannot be resolved, or a matched association is
            missing its file GUID.
    """
    # Compute an edition if none is provided (SHA256 of the URL, truncated to 16)
    if not edition:
        edition = hashlib.sha256(url.encode("utf-8")).hexdigest()[:16]

    # Resolve item GUIDs
    effective_guid = self._api_resolve_item_guid(item_number)
    revs_url = f"{self._api_base()}/items/{effective_guid}/revisions"
    self._log(f"GET {revs_url}")
    r = self.session.get(revs_url)
    r.raise_for_status()
    revs = self._ensure_json(r).get("results", [])
    working_guid = None
    for rv in revs:
        # NOTE(review): status == 0 appears to also denote a WORKING
        # revision on some tenants — confirm against the Arena API docs.
        if (str(rv.get("revisionStatus") or "").upper() == "WORKING") or (
            rv.get("status") == 0
        ):
            working_guid = rv.get("guid")
            break
    if not working_guid:
        raise ArenaError(
            "No WORKING revision exists for this item. Create a working revision in Arena, then retry."
        )

    # Resolve category GUID
    cat_guid = self._api_resolve_file_category_guid(category_name)

    # Helper to list associations for a given item/revision guid
    def _list_assocs(guid: str) -> list[dict]:
        url2 = f"{self._api_base()}/items/{guid}/files"
        self._log(f"GET {url2}")
        lr = self.session.get(url2)
        lr.raise_for_status()
        payload = self._ensure_json(lr)
        # Accept either {"results": [...]} or a bare list response.
        return payload.get("results", payload if isinstance(payload, list) else [])

    # Try to find an existing WEB/FTP style file by title or URL
    def _pick_assoc_by_title_or_url(assocs: list[dict]) -> Optional[dict]:
        # First match wins unless a later candidate is both the latest
        # edition association AND primary while the current pick is not.
        pick = None
        for a in assocs:
            f = a.get("file") or {}
            smn = str(
                f.get("storageMethodName") or f.get("storageMethod") or ""
            ).upper()
            if smn not in {"WEB", "FTP"}:
                continue
            f_title = (f.get("title") or "").strip()
            f_loc = (f.get("location") or "").strip()
            if (f_title and f_title == title) or (f_loc and f_loc == url):
                if not pick:
                    pick = a
                    continue
                # prefer latestEditionAssociation + primary
                if (
                    a.get("latestEditionAssociation") and a.get("primary")
                ) and not (
                    pick.get("latestEditionAssociation") and pick.get("primary")
                ):
                    pick = a
        return pick

    # Search WORKING first; fall back to the EFFECTIVE revision.
    assoc = _pick_assoc_by_title_or_url(
        _list_assocs(working_guid)
    ) or _pick_assoc_by_title_or_url(_list_assocs(effective_guid))

    # If found: update the File summary (ensures storageMethodName=WEB + new location/edition)
    if assoc:
        file_guid = (assoc.get("file") or {}).get("guid")
        if not file_guid:
            raise ArenaError(
                "Existing web-link association found but missing file.guid"
            )
        updated = self._api_update_web_file(
            file_guid=file_guid,
            category_guid=cat_guid,
            title=title,
            location_url=url,
            edition=str(edition),
            description=description,
            file_format=file_format,
            private=False,
        )
        # Normalize to a consistent response
        return {
            "ok": True,
            "action": "updated",
            "file": {
                "guid": updated.get("guid"),
                "number": updated.get("number"),
                "title": updated.get("title"),
                "edition": updated.get("edition"),
                "storageMethodName": updated.get("storageMethodName"),
                "location": updated.get("location"),
            },
            "associationGuid": assoc.get("guid"),
            "primary": assoc.get("primary"),
            "latestEditionAssociation": assoc.get("latestEditionAssociation"),
        }

    # Else: create a new WEB file, then associate it on WORKING
    created = self._api_create_web_file(
        category_guid=cat_guid,
        title=title,
        location_url=url,
        edition=str(edition),
        description=description,
        file_format=file_format,
        private=False,
    )
    file_guid = created.get("guid")
    assoc_resp = self._api_item_add_existing_file(
        item_guid=working_guid,
        file_guid=file_guid,
        primary=primary,
        latest_edition_association=latest_edition_association,
        reference=reference,
    )

    return {
        "ok": True,
        "action": "created",
        "associationGuid": assoc_resp.get("guid"),
        "primary": assoc_resp.get("primary"),
        "latestEditionAssociation": assoc_resp.get("latestEditionAssociation"),
        "file": {
            "guid": file_guid,
            "number": created.get("number"),
            "title": created.get("title"),
            "edition": created.get("edition"),
            # Create response may omit these fields; fall back to what we sent.
            "storageMethodName": created.get("storageMethodName") or "WEB",
            "location": created.get("location") or url,
        },
    }
|
|
1114
|
+
|
|
1115
|
+
def _run(self, cmd: str) -> Tuple[int, str, str]:
|
|
1116
|
+
proc = subprocess.run(
|
|
1117
|
+
cmd, shell=True, check=False, capture_output=True, text=True
|
|
1118
|
+
)
|
|
1119
|
+
return proc.returncode, proc.stdout.strip(), proc.stderr.strip()
|