golem-vm-provider 0.1.49__py3-none-any.whl → 0.1.50__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: golem-vm-provider
-Version: 0.1.49
+Version: 0.1.50
 Summary: VM on Golem Provider Node - Run your own provider node to offer VMs on the Golem Network
 Keywords: golem,vm,provider,cloud,decentralized
 Author: Phillip Jensen
@@ -16,7 +16,7 @@ provider/discovery/service.py,sha256=vX_mVSxvn3arnb2cKDM_SeJp1ZgPdImP2aUubeXgdRg
 provider/main.py,sha256=RSq2_dbBjQYkNwohxuwgzKbnzcqzEgJH1wcPMMf00t0,18925
 provider/network/port_verifier.py,sha256=3l6WNwBHydggJRFYkAsuBp1eCxaU619kjWuM-zSVj2o,13267
 provider/payments/blockchain_service.py,sha256=4GrzDKwCSUVoENqjD4RLyJ0qwBOJKMyVk5Li-XNsyTc,3567
-provider/payments/monitor.py,sha256=76ab-86UzFzWOkf9UtKE4B3Rz9-LRVjRrUJOHEKnwFA,4055
+provider/payments/monitor.py,sha256=Rw17zYsxZre0zU6R0oeRNvVIzMdXLsgoUvSPHpJy6I0,4488
 provider/payments/stream_map.py,sha256=qk6Y8hS72DplAifZ0ZMWPHBAyc_3IWIQyWUBuCU3_To,1191
 provider/security/ethereum.py,sha256=EwPZj4JR8OEpto6LhKjuuT3Z9pBX6P7-UQaqJtqFkYQ,1242
 provider/security/faucet.py,sha256=8T4lW1fVQgUk8EQILgbrr9UUosw9e7eA40tlZ2_KCPQ,4368
@@ -38,7 +38,7 @@ provider/vm/port_manager.py,sha256=iYSwjTjD_ziOhG8aI7juKHw1OwwRUTJQyQoRUNQvz9w,1
 provider/vm/provider.py,sha256=A7QN89EJjcSS40_SmKeinG1Jp_NGffJaLse-XdKciAs,1164
 provider/vm/proxy_manager.py,sha256=n4NTsyz2rtrvjtf_ceKBk-g2q_mzqPwruB1q7UlQVBc,14928
 provider/vm/service.py,sha256=Ki4SGNIZUq3XmaPMwAOoNzdZzKQsmFXid374wgjFPes,4636
-golem_vm_provider-0.1.49.dist-info/METADATA,sha256=4-pM4zumZUj7ZfXOiWO2s-lC4BsquHGFdETQfFkyPTQ,16585
-golem_vm_provider-0.1.49.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
-golem_vm_provider-0.1.49.dist-info/entry_points.txt,sha256=5Jiie1dIXygmxmDW66bKKxQpmBLJ7leSKRrb8bkQALw,52
-golem_vm_provider-0.1.49.dist-info/RECORD,,
+golem_vm_provider-0.1.50.dist-info/METADATA,sha256=h4dTmyA9UOyvi8R3pp13bW34sxr8W8JSoYzALN8gRmA,16585
+golem_vm_provider-0.1.50.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
+golem_vm_provider-0.1.50.dist-info/entry_points.txt,sha256=5Jiie1dIXygmxmDW66bKKxQpmBLJ7leSKRrb8bkQALw,52
+golem_vm_provider-0.1.50.dist-info/RECORD,,
@@ -15,11 +15,21 @@ class StreamMonitor:
         self.settings = settings
         self._task: Optional[asyncio.Task] = None
 
+    def _get(self, key: str, default=None):
+        """Safely read setting from either an object with attributes or a dict-like mapping."""
+        try:
+            return getattr(self.settings, key)
+        except Exception:
+            try:
+                return self.settings.get(key, default)
+            except Exception:
+                return default
+
     def start(self):
-        if self.settings.STREAM_MONITOR_ENABLED or self.settings.STREAM_WITHDRAW_ENABLED:
+        if self._get("STREAM_MONITOR_ENABLED", False) or self._get("STREAM_WITHDRAW_ENABLED", False):
             logger.info(
-                f"⏱️ Stream monitor enabled (check={self.settings.STREAM_MONITOR_ENABLED}, withdraw={self.settings.STREAM_WITHDRAW_ENABLED}) "
-                f"interval={self.settings.STREAM_MONITOR_INTERVAL_SECONDS}s"
+                f"⏱️ Stream monitor enabled (check={self._get('STREAM_MONITOR_ENABLED', False)}, "
+                f"withdraw={self._get('STREAM_WITHDRAW_ENABLED', False)}) interval={self._get('STREAM_MONITOR_INTERVAL_SECONDS', 60)}s"
             )
             self._task = asyncio.create_task(self._run(), name="stream-monitor")
 
@@ -35,7 +45,7 @@ class StreamMonitor:
         last_withdraw = 0
         while True:
             try:
-                await asyncio.sleep(self.settings.STREAM_MONITOR_INTERVAL_SECONDS)
+                await asyncio.sleep(int(self._get("STREAM_MONITOR_INTERVAL_SECONDS", 60)))
                 items = await self.stream_map.all_items()
                 now = int(self.reader.web3.eth.get_block("latest")["timestamp"]) if items else 0
                 logger.debug(f"stream monitor tick: {len(items)} streams, now={now}")
@@ -51,7 +61,7 @@ class StreamMonitor:
                         f"stream {stream_id} for VM {vm_id}: start={s['startTime']} stop={s['stopTime']} "
                         f"rate={s['ratePerSecond']} withdrawn={s['withdrawn']} halted={s['halted']} remaining={remaining}s"
                     )
-                    if self.settings.STREAM_MONITOR_ENABLED and remaining < self.settings.STREAM_MIN_REMAINING_SECONDS:
+                    if self._get("STREAM_MONITOR_ENABLED", False) and remaining < int(self._get("STREAM_MIN_REMAINING_SECONDS", 0)):
                         logger.info(f"Stopping VM {vm_id} due to low stream runway ({remaining}s)")
                         try:
                             await self.vm_service.stop_vm(vm_id)
@@ -59,16 +69,16 @@ class StreamMonitor:
                             logger.warning(f"stop_vm failed for {vm_id}: {e}")
                     else:
                         logger.debug(
-                            f"VM {vm_id} stream {stream_id} healthy (remaining={remaining}s, threshold={self.settings.STREAM_MIN_REMAINING_SECONDS}s)"
+                            f"VM {vm_id} stream {stream_id} healthy (remaining={remaining}s, threshold={self._get('STREAM_MIN_REMAINING_SECONDS', 0)}s)"
                         )
                     # Withdraw if enough vested and configured
-                    if self.settings.STREAM_WITHDRAW_ENABLED and self.client:
+                    if self._get("STREAM_WITHDRAW_ENABLED", False) and self.client:
                         vested = max(min(now, s["stopTime"]) - s["startTime"], 0) * s["ratePerSecond"]
                         withdrawable = max(vested - s["withdrawn"], 0)
                         logger.debug(f"withdraw check stream {stream_id}: vested={vested} withdrawable={withdrawable}")
                         # Enforce a minimum interval between withdrawals
-                        if withdrawable >= self.settings.STREAM_MIN_WITHDRAW_WEI and (
-                            now - last_withdraw >= self.settings.STREAM_WITHDRAW_INTERVAL_SECONDS
+                        if withdrawable >= int(self._get("STREAM_MIN_WITHDRAW_WEI", 0)) and (
+                            now - last_withdraw >= int(self._get("STREAM_WITHDRAW_INTERVAL_SECONDS", 1800))
                         ):
                             try:
                                 self.client.withdraw(stream_id)
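
The functional change in this release is confined to provider/payments/monitor.py: settings are now read through the new StreamMonitor._get helper instead of direct attribute access, with explicit defaults and int() coercion for the numeric thresholds. The short sketch below is illustrative only, not code from the package; read_setting, ObjectSettings, and dict_settings are hypothetical names used to show the fallback order that helper implements (attribute access first, then a dict-style get(), then the supplied default).

# Hypothetical sketch of the settings-fallback pattern introduced in 0.1.50;
# names here are not from the package.
class ObjectSettings:
    STREAM_MONITOR_ENABLED = True          # attribute-style settings object

dict_settings = {"STREAM_MONITOR_INTERVAL_SECONDS": 30}   # dict-style settings

def read_setting(settings, key, default=None):
    """Attribute access first, then mapping .get(), then the default."""
    try:
        return getattr(settings, key)
    except Exception:
        try:
            return settings.get(key, default)
        except Exception:
            return default

print(read_setting(ObjectSettings(), "STREAM_MONITOR_ENABLED", False))      # True (attribute hit)
print(read_setting(dict_settings, "STREAM_MONITOR_INTERVAL_SECONDS", 60))   # 30 (mapping hit)
print(read_setting(dict_settings, "STREAM_MIN_WITHDRAW_WEI", 0))            # 0 (default)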