netcrawl-sdk 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,95 @@
1
+ Metadata-Version: 2.4
2
+ Name: netcrawl-sdk
3
+ Version: 0.1.0
4
+ Summary: Python SDK for NetCrawl — the programmable idle game
5
+ Author: NetCrawl
6
+ License: MIT
7
+ Project-URL: Homepage, https://github.com/Starscribers/netcrawl-workspace
8
+ Classifier: Programming Language :: Python :: 3
9
+ Classifier: Topic :: Games/Entertainment
10
+ Requires-Python: >=3.10
11
+ Description-Content-Type: text/markdown
12
+ Provides-Extra: dev
13
+ Requires-Dist: pytest; extra == "dev"
14
+
15
+ # NetCrawl Python SDK
16
+
17
+ Write NetCrawl workers in Python. Workers communicate with the local game server over HTTP.
18
+
19
+ ## Install
20
+
21
+ ```bash
22
+ pip install .
23
+ ```
24
+
25
+ ## Quick start
26
+
27
+ ```python
28
+ from netcrawl import WorkerClass, Route
29
+ from netcrawl.items.equipment import Pickaxe
30
+
31
+ class Collector(WorkerClass):
32
+ """Harvests ore and returns to hub."""
33
+ pickaxe = Pickaxe()
34
+ to_mine = Route("Path from Hub to ore node")
35
+ to_hub = Route("Return path back to Hub")
36
+
37
+ def on_startup(self):
38
+ self.trips = 0
39
+
40
+ def on_loop(self):
41
+ self.move_through(self.to_mine)
42
+ self.collect()
43
+ self.move_through(self.to_hub)
44
+ self.deposit()
45
+ self.trips += 1
46
+ ```
47
+
48
+ ## Field types
49
+
50
+ | Field | Description |
51
+ |---|---|
52
+ | `Pickaxe()` | Consumes 1 Pickaxe from inventory at deploy time |
53
+ | `Shield()` | Consumes 1 Shield from inventory at deploy time |
54
+ | `Beacon()` | Consumes 1 Beacon from inventory at deploy time |
55
+ | `Route("description")` | User specifies a list of node IDs in the UI |
56
+
57
+ ## Worker API
58
+
59
+ | Method | Description |
60
+ |---|---|
61
+ | `self.move(node_id)` | Move to adjacent node (blocking) |
62
+ | `self.move_through(route)` | Walk a list of node IDs in order |
63
+ | `self.collect()` / `self.harvest()` | Harvest resources at current node |
64
+ | `self.deposit()` | Deposit carried resources at Hub |
65
+ | `self.scan()` | Scan adjacent nodes |
66
+ | `self.repair(node_id)` | Repair an infected adjacent node |
67
+ | `self.info/warn/error(msg)` | Log a message (visible in UI) |
68
+ | `self.current_node` | Current node ID (property) |
69
+ | `self.carrying` | Currently held resources (property) |
70
+
71
+ ## AdvancedGraphGadget mixin
72
+
73
+ ```python
74
+ from netcrawl.mixins.graph import AdvancedGraphGadget
75
+
76
+ class Explorer(WorkerClass, AdvancedGraphGadget):
77
+ def on_loop(self):
78
+ self.travel_to("r3") # A* pathfinding
79
+ nearest = self.find_nearest("ore")
80
+ nodes = self.explore() # wide-radius scan
81
+ ```
82
+
83
+ ## Schema introspection
84
+
85
+ ```python
86
+ print(Collector.get_schema())
87
+ ```
88
+
89
+ ## Running examples
90
+
91
+ ```bash
92
+ cd packages/sdk-python
93
+ python examples/test_sdk.py
94
+ ```
95
+
@@ -0,0 +1,81 @@
1
+ # NetCrawl Python SDK
2
+
3
+ Write NetCrawl workers in Python. Workers communicate with the local game server over HTTP.
4
+
5
+ ## Install
6
+
7
+ ```bash
8
+ pip install .
9
+ ```
10
+
11
+ ## Quick start
12
+
13
+ ```python
14
+ from netcrawl import WorkerClass, Route
15
+ from netcrawl.items.equipment import Pickaxe
16
+
17
+ class Collector(WorkerClass):
18
+ """Harvests ore and returns to hub."""
19
+ pickaxe = Pickaxe()
20
+ to_mine = Route("Path from Hub to ore node")
21
+ to_hub = Route("Return path back to Hub")
22
+
23
+ def on_startup(self):
24
+ self.trips = 0
25
+
26
+ def on_loop(self):
27
+ self.move_through(self.to_mine)
28
+ self.collect()
29
+ self.move_through(self.to_hub)
30
+ self.deposit()
31
+ self.trips += 1
32
+ ```
33
+
34
+ ## Field types
35
+
36
+ | Field | Description |
37
+ |---|---|
38
+ | `Pickaxe()` | Consumes 1 Pickaxe from inventory at deploy time |
39
+ | `Shield()` | Consumes 1 Shield from inventory at deploy time |
40
+ | `Beacon()` | Consumes 1 Beacon from inventory at deploy time |
41
+ | `Route("description")` | User specifies a list of node IDs in the UI |
42
+
43
+ ## Worker API
44
+
45
+ | Method | Description |
46
+ |---|---|
47
+ | `self.move(node_id)` | Move to adjacent node (blocking) |
48
+ | `self.move_through(route)` | Walk a list of node IDs in order |
49
+ | `self.collect()` / `self.harvest()` | Harvest resources at current node |
50
+ | `self.deposit()` | Deposit carried resources at Hub |
51
+ | `self.scan()` | Scan adjacent nodes |
52
+ | `self.repair(node_id)` | Repair an infected adjacent node |
53
+ | `self.info/warn/error(msg)` | Log a message (visible in UI) |
54
+ | `self.current_node` | Current node ID (property) |
55
+ | `self.carrying` | Currently held resources (property) |
56
+
57
+ ## AdvancedGraphGadget mixin
58
+
59
+ ```python
60
+ from netcrawl.mixins.graph import AdvancedGraphGadget
61
+
62
+ class Explorer(WorkerClass, AdvancedGraphGadget):
63
+ def on_loop(self):
64
+ self.travel_to("r3") # A* pathfinding
65
+ nearest = self.find_nearest("ore")
66
+ nodes = self.explore() # wide-radius scan
67
+ ```
68
+
69
+ ## Schema introspection
70
+
71
+ ```python
72
+ print(Collector.get_schema())
73
+ ```
74
+
75
+ ## Running examples
76
+
77
+ ```bash
78
+ cd packages/sdk-python
79
+ python examples/test_sdk.py
80
+ ```
81
+
@@ -0,0 +1,23 @@
1
+ from netcrawl.base import WorkerClass
2
+ from netcrawl.network.edge import Edge
3
+ from netcrawl.network.route import Route
4
+ from netcrawl.app import NetCrawl
5
+ from netcrawl.icons import Icon
6
+ from netcrawl.items import Pickaxe, Shield, Beacon, SensorGadget, BasicSensor, AdvancedSensor
7
+ from netcrawl.services import CacheService, ServiceNotReachable
8
+ from netcrawl.nodes import (
9
+ BaseNode, HubNode, ResourceNode,
10
+ ComputeNode, ComputeTask, APINode, APIRequestObj,
11
+ LockedNode, InfectedNode, NodeEdge,
12
+ )
13
+ from netcrawl.sensors import EdgeInfo, AdvancedEdgeInfo
14
+
15
# Public API surface: names callers get via `from netcrawl import ...`.
# Mirrors the re-export imports at the top of this module.
__all__ = [
    "WorkerClass", "Edge", "Route", "NetCrawl", "Icon",
    "Pickaxe", "Shield", "Beacon", "SensorGadget", "BasicSensor", "AdvancedSensor",
    "CacheService", "ServiceNotReachable",
    "BaseNode", "HubNode", "ResourceNode",
    "ComputeNode", "ComputeTask", "APINode", "APIRequestObj",
    "LockedNode", "InfectedNode", "NodeEdge",
    "EdgeInfo", "AdvancedEdgeInfo",
]
@@ -0,0 +1,176 @@
1
+ """
2
+ netcrawl/app.py
3
+
4
+ NetCrawl code server — registers worker classes with the game server,
5
+ polls for deploy requests, and spawns worker subprocesses.
6
+ """
7
+
8
+ import time
9
+ from typing import Type
10
+
11
+ from netcrawl.base import WorkerClass
12
+ from netcrawl.client import http_post, http_get
13
+ from netcrawl.daemon.spawner import spawn_worker, kill_worker, list_active
14
+
15
+
16
class NetCrawl:
    """
    Code server that bridges your worker classes with the game server.

    Lifecycle: register worker classes locally, wait for the game server
    to become reachable, push the class schemas to it, then poll the
    deploy queue roughly once per second and spawn one subprocess per
    deploy request.

    Usage:
        app = NetCrawl(server="http://localhost:4800")
        app.register(Miner)
        app.register(Guardian)
        app.run()
    """

    def __init__(self, server: str = "http://localhost:4800", api_key: str = ""):
        # Strip trailing slashes so _post/_get path concatenation never
        # produces a double slash.
        self.server = server.rstrip("/")
        self.api_key = api_key
        # class_id -> registered worker class.
        self._classes: dict[str, "Type[WorkerClass]"] = {}
        # class_id -> source file path (sent to the server; used by the spawner).
        self._class_files: dict[str, str] = {}

    def register(self, cls: "Type[WorkerClass]") -> None:
        """Register a worker class for deployment.

        Raises:
            ValueError: if another class with the same ``class_id`` was
                already registered.
        """
        import inspect

        class_id = cls.class_id
        class_name = cls.class_name

        if class_id in self._classes:
            existing = self._classes[class_id]
            raise ValueError(
                f"Duplicate class_id '{class_id}': "
                f"{cls.__name__} conflicts with {existing.__name__}"
            )

        self._classes[class_id] = cls
        # Remember where the class is defined so the spawner can import it.
        source_file = inspect.getfile(cls)
        self._class_files[class_id] = source_file
        print(f"[NetCrawl] Registered: {class_name} (id={class_id})")

    def _post(self, path: str, data: dict) -> dict:
        """POST ``data`` to the game server at ``path`` and return the JSON reply."""
        return http_post(f"{self.server}{path}", data)

    def _get(self, path: str) -> dict:
        """GET ``path`` from the game server and return the JSON reply."""
        return http_get(f"{self.server}{path}")

    def _register_all(self) -> None:
        """Push the schema of every registered worker class to the game server."""
        classes = []
        for class_id, cls in self._classes.items():
            schema = cls.get_schema()
            # The server needs the source file and language to spawn workers.
            schema["file"] = self._class_files.get(class_id, "")
            schema["language"] = "python"
            classes.append(schema)

        result = self._post("/api/worker-classes/register", {"classes": classes})
        if result.get("ok"):
            print(f"[NetCrawl] Registered {result.get('registered', 0)} worker classes")
        else:
            print(f"[NetCrawl] Registration failed: {result.get('error')}")

    def _poll_deploy_queue(self) -> None:
        """Poll the game server for pending deploy requests and spawn workers.

        Best-effort: any error (e.g. the server being temporarily
        unreachable) is swallowed — the next 1-second poll retries.
        """
        try:
            result = self._get("/api/deploy-queue")
            for req in result.get("requests", []):
                self._handle_deploy(req)
        except Exception:
            pass  # Server might be temporarily unreachable; retry next poll.

    def _handle_deploy(self, deploy_req: dict) -> None:
        """Spawn a worker subprocess for a single deploy request.

        Always acknowledges the request via ``/api/deploy-ack`` — with a
        ``pid`` on success or an ``error`` string on failure — so the
        server never waits on a dead deploy.
        """
        worker_id = deploy_req["workerId"]
        class_id = deploy_req["classId"]
        node_id = deploy_req["nodeId"]
        injected_fields = deploy_req.get("injectedFields", {})

        cls = self._classes.get(class_id)
        if not cls:
            print(f"[NetCrawl] Unknown class_id: {class_id}")
            self._post("/api/deploy-ack", {
                "workerId": worker_id,
                "error": f"Unknown worker class_id: {class_id}",
            })
            return

        script_path = self._class_files.get(class_id, "")
        print(f"[NetCrawl] Spawning {cls.class_name} (id={class_id}, worker={worker_id}) on node {node_id}")

        try:
            pid = spawn_worker(
                worker_id=worker_id,
                script_path=script_path,
                class_name=cls.__name__,  # Python class name for import
                api_url=self.server,
                injected_fields=injected_fields,
            )
            print(f"[NetCrawl] Spawned {cls.class_name} — PID {pid}")
            self._post("/api/deploy-ack", {
                "workerId": worker_id,
                "pid": pid,
            })
        except Exception as e:
            print(f"[NetCrawl] Spawn failed: {e}")
            self._post("/api/deploy-ack", {
                "workerId": worker_id,
                "error": str(e),
            })

    def _wait_for_server(self, timeout: int = 30) -> bool:
        """Block until the game server's /health endpoint answers ``ok``.

        Returns True once reachable, False after ``timeout`` seconds.
        """
        start = time.time()
        while time.time() - start < timeout:
            try:
                result = self._get("/health")
                if result.get("status") == "ok":
                    return True
            except Exception:
                pass  # Not up yet; retry after a short sleep.
            time.sleep(1)
        return False

    def run(self) -> None:
        """
        Start the code server:
          1. Wait for the game server
          2. Register all worker classes
          3. Poll for deploy requests every second
          4. Re-register every 30s to handle server restarts

        Runs until interrupted; Ctrl+C kills all spawned workers before
        exiting.
        """
        print("[NetCrawl] Code Server starting...")
        print(f"[NetCrawl] Server: {self.server}")
        worker_list = ', '.join(f"{cls.class_name}({cid})" for cid, cls in self._classes.items())
        print(f"[NetCrawl] Workers: {worker_list}")
        print()

        # Wait for server
        print("[NetCrawl] Waiting for game server...")
        if not self._wait_for_server():
            print("[NetCrawl] ERROR: Game server not reachable. Is it running?")
            return

        print("[NetCrawl] Game server connected!")
        self._register_all()

        print()
        print("[NetCrawl] Code server running. Polling for deploy requests...")
        print("[NetCrawl] Press Ctrl+C to stop.")

        register_counter = 0
        try:
            while True:
                self._poll_deploy_queue()
                time.sleep(1)

                # Re-register every 30 polls (~30s) so a restarted server
                # regains the class list without restarting this process.
                register_counter += 1
                if register_counter >= 30:
                    register_counter = 0
                    self._register_all()
        except KeyboardInterrupt:
            print("\n[NetCrawl] Shutting down...")
            for u in list_active():
                kill_worker(u["worker_id"])
            print("[NetCrawl] All workers stopped. Goodbye!")