oqtopus 0.2.1__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
oqtopus/core/module.py CHANGED
@@ -1,21 +1,43 @@
1
1
  import json
2
+ import os
3
+ import re
4
+ import time
2
5
 
3
- from qgis.PyQt.QtCore import QByteArray, QObject, QUrl, pyqtSignal
6
+ from qgis.PyQt.QtCore import (
7
+ QByteArray,
8
+ QObject,
9
+ QTimer,
10
+ QUrl,
11
+ pyqtSignal,
12
+ )
4
13
  from qgis.PyQt.QtNetwork import QNetworkAccessManager, QNetworkReply, QNetworkRequest
5
14
 
6
15
  from ..utils.plugin_utils import PluginUtils, logger
7
16
  from .module_package import ModulePackage
8
17
 
18
# How long (in seconds) a cached GitHub API response remains valid: one hour.
CACHE_DURATION = 60 * 60
20
+
9
21
 
10
22
  class Module(QObject):
11
23
  signal_versionsLoaded = pyqtSignal(str)
12
24
  signal_developmentVersionsLoaded = pyqtSignal(str)
13
25
 
14
- def __init__(self, name: str, organisation: str, repository: str, parent=None):
26
+ def __init__(
27
+ self,
28
+ name: str,
29
+ id: str,
30
+ organisation: str,
31
+ repository: str,
32
+ exclude_releases: str | None = None,
33
+ parent=None,
34
+ ):
15
35
  super().__init__(parent)
16
36
  self.name = name
37
+ self.id = id
17
38
  self.organisation = organisation
18
39
  self.repository = repository
40
+ self.exclude_releases = exclude_releases
19
41
  self.versions = []
20
42
  self.development_versions = []
21
43
  self.latest_version = None
@@ -24,7 +46,67 @@ class Module(QObject):
24
46
  def __repr__(self):
25
47
  return f"Module(name={self.name}, organisation={self.organisation}, repository={self.repository})"
26
48
 
49
+ def __get_cache_dir(self):
50
+ """Get the cache directory for GitHub API responses."""
51
+ cache_dir = os.path.join(
52
+ PluginUtils.plugin_cache_path(), "github_api", self.organisation, self.repository
53
+ )
54
+ os.makedirs(cache_dir, exist_ok=True)
55
+ return cache_dir
56
+
57
+ def __get_cache_file(self, cache_type):
58
+ """Get the cache file path for a specific type (releases or pulls)."""
59
+ cache_file = os.path.join(self.__get_cache_dir(), f"{cache_type}.json")
60
+ return cache_file
61
+
62
+ def __read_cache(self, cache_type):
63
+ """Read cached data if it exists and is not expired."""
64
+ cache_file = self.__get_cache_file(cache_type)
65
+ if not os.path.exists(cache_file):
66
+ return None
67
+
68
+ # Check if cache is expired
69
+ file_age = time.time() - os.path.getmtime(cache_file)
70
+ if file_age > CACHE_DURATION:
71
+ return None
72
+
73
+ try:
74
+ with open(cache_file, encoding="utf-8") as f:
75
+ data = json.load(f)
76
+ logger.info(
77
+ f"Using cached {cache_type} data (age: {file_age:.0f}s, {len(data)} items)"
78
+ )
79
+ return data
80
+ except Exception as e:
81
+ logger.warning(f"Failed to read cache for {cache_type}: {e}")
82
+ return None
83
+
84
+ def __write_cache(self, cache_type, data):
85
+ """Write data to cache file."""
86
+ cache_file = self.__get_cache_file(cache_type)
87
+ try:
88
+ with open(cache_file, "w", encoding="utf-8") as f:
89
+ json.dump(data, f)
90
+ except Exception as e:
91
+ logger.warning(f"Failed to write cache for {cache_type}: {e}")
92
+
27
93
  def start_load_versions(self):
94
+ # Read cache asynchronously to avoid blocking UI
95
+ QTimer.singleShot(0, self.__async_load_versions)
96
+
97
+ def __async_load_versions(self):
98
+ """Load versions asynchronously from cache or API."""
99
+ # Try to load from cache first
100
+ cached_data = self.__read_cache("releases")
101
+ if cached_data is not None:
102
+ try:
103
+ self._process_versions_data(cached_data)
104
+ self.signal_versionsLoaded.emit("")
105
+ return
106
+ except Exception as e:
107
+ logger.warning(f"Failed to process cached releases: {e}")
108
+
109
+ # Cache miss or invalid - fetch from API
28
110
  url = f"https://api.github.com/repos/{self.organisation}/{self.repository}/releases"
29
111
  logger.info(f"Loading versions from '{url}'...")
30
112
  request = QNetworkRequest(QUrl(url))
@@ -42,36 +124,63 @@ class Module(QObject):
42
124
  try:
43
125
  data = reply.readAll().data()
44
126
  json_versions = json.loads(data.decode())
45
- self.versions = []
46
- self.latest_version = None
47
- for json_version in json_versions:
48
- module_package = ModulePackage(
49
- module=self,
50
- organisation=self.organisation,
51
- repository=self.repository,
52
- json_payload=json_version,
53
- type=ModulePackage.Type.RELEASE,
54
- )
55
- self.versions.append(module_package)
56
-
57
- # Latest version -> most recent commit date for non prerelease
58
- if module_package.prerelease is True:
59
- continue
60
127
 
61
- if self.latest_version is None:
62
- self.latest_version = module_package
63
- continue
128
+ # Cache the response
129
+ self.__write_cache("releases", json_versions)
64
130
 
65
- if module_package.created_at > self.latest_version.created_at:
66
- self.latest_version = module_package
131
+ self._process_versions_data(json_versions)
67
132
  self.signal_versionsLoaded.emit("")
68
133
  except Exception as e:
69
134
  self.signal_versionsLoaded.emit(str(e))
70
135
  reply.deleteLater()
71
136
 
137
+ def _process_versions_data(self, json_versions):
138
+ """Process versions data from cache or API response."""
139
+ self.versions = []
140
+ self.latest_version = None
141
+
142
+ # Compile exclude pattern if specified
143
+ exclude_pattern = None
144
+ if self.exclude_releases:
145
+ try:
146
+ exclude_pattern = re.compile(self.exclude_releases)
147
+ except re.error as e:
148
+ logger.warning(f"Invalid exclude_releases pattern '{self.exclude_releases}': {e}")
149
+
150
+ for json_version in json_versions:
151
+ # Check if this release should be excluded
152
+ tag_name = json_version.get("tag_name", "")
153
+ if exclude_pattern and exclude_pattern.search(tag_name):
154
+ continue
155
+
156
+ module_package = ModulePackage(
157
+ module=self,
158
+ organisation=self.organisation,
159
+ repository=self.repository,
160
+ json_payload=json_version,
161
+ type=ModulePackage.Type.RELEASE,
162
+ )
163
+ self.versions.append(module_package)
164
+
165
+ # Latest version -> most recent commit date for non prerelease
166
+ if module_package.prerelease is True:
167
+ continue
168
+
169
+ if self.latest_version is None:
170
+ self.latest_version = module_package
171
+ continue
172
+
173
+ if module_package.created_at > self.latest_version.created_at:
174
+ self.latest_version = module_package
175
+
72
176
  def start_load_development_versions(self):
73
177
  self.development_versions = []
74
178
 
179
+ # Add pre-releases from already loaded versions
180
+ for version in self.versions:
181
+ if version.prerelease is True:
182
+ self.development_versions.append(version)
183
+
75
184
  # Create version for the main branch
76
185
  mainVersion = ModulePackage(
77
186
  module=self,
@@ -82,10 +191,20 @@ class Module(QObject):
82
191
  name="main",
83
192
  branch="main",
84
193
  )
194
+ # Fetch the latest commit SHA for caching (async to avoid blocking UI)
195
+ QTimer.singleShot(0, lambda: mainVersion.fetch_commit_sha())
85
196
  self.development_versions.append(mainVersion)
86
197
 
198
+ # Try to load pull requests from cache first
199
+ cached_data = self.__read_cache("pulls")
200
+ if cached_data is not None:
201
+ # Process cache asynchronously to keep UI responsive
202
+ QTimer.singleShot(0, lambda: self._process_cached_pulls(cached_data))
203
+ return
204
+
205
+ # Cache miss or invalid - fetch from API
87
206
  url = f"https://api.github.com/repos/{self.organisation}/{self.repository}/pulls"
88
- logger.info(f"Loading development versions from '{url}'...")
207
+ logger.info(f"Loading pre-releases and development versions from '{url}'...")
89
208
 
90
209
  request = QNetworkRequest(QUrl(url))
91
210
  headers = PluginUtils.get_github_headers()
@@ -94,6 +213,23 @@ class Module(QObject):
94
213
  reply = self.network_manager.get(request)
95
214
  reply.finished.connect(lambda: self._on_development_versions_reply(reply))
96
215
 
216
+ def _process_cached_pulls(self, cached_data):
217
+ """Process cached pull requests data asynchronously."""
218
+ try:
219
+ self._process_pull_requests_data(cached_data)
220
+ self.signal_developmentVersionsLoaded.emit("")
221
+ except Exception as e:
222
+ logger.warning(f"Failed to process cached pull requests: {e}")
223
+ # On error, continue with API call
224
+ url = f"https://api.github.com/repos/{self.organisation}/{self.repository}/pulls"
225
+ logger.info(f"Loading pre-releases and development versions from '{url}'...")
226
+ request = QNetworkRequest(QUrl(url))
227
+ headers = PluginUtils.get_github_headers()
228
+ for key, value in headers.items():
229
+ request.setRawHeader(QByteArray(key.encode()), QByteArray(value.encode()))
230
+ reply = self.network_manager.get(request)
231
+ reply.finished.connect(lambda: self._on_development_versions_reply(reply))
232
+
97
233
  def _on_development_versions_reply(self, reply):
98
234
  if reply.error() != QNetworkReply.NetworkError.NoError:
99
235
  self.signal_developmentVersionsLoaded.emit(reply.errorString())
@@ -103,16 +239,24 @@ class Module(QObject):
103
239
  try:
104
240
  data = reply.readAll().data()
105
241
  json_versions = json.loads(data.decode())
106
- for json_version in json_versions:
107
- module_package = ModulePackage(
108
- module=self,
109
- organisation=self.organisation,
110
- repository=self.repository,
111
- json_payload=json_version,
112
- type=ModulePackage.Type.PULL_REQUEST,
113
- )
114
- self.development_versions.append(module_package)
242
+
243
+ # Cache the response
244
+ self.__write_cache("pulls", json_versions)
245
+
246
+ self._process_pull_requests_data(json_versions)
115
247
  self.signal_developmentVersionsLoaded.emit("")
116
248
  except Exception as e:
117
249
  self.signal_developmentVersionsLoaded.emit(str(e))
118
250
  reply.deleteLater()
251
+
252
+ def _process_pull_requests_data(self, json_versions):
253
+ """Process pull requests data from cache or API response."""
254
+ for json_version in json_versions:
255
+ module_package = ModulePackage(
256
+ module=self,
257
+ organisation=self.organisation,
258
+ repository=self.repository,
259
+ json_payload=json_version,
260
+ type=ModulePackage.Type.PULL_REQUEST,
261
+ )
262
+ self.development_versions.append(module_package)
@@ -0,0 +1,234 @@
1
+ """Background task for module install/upgrade/uninstall operations."""
2
+
3
+ import psycopg
4
+ from qgis.PyQt.QtCore import QThread, pyqtSignal
5
+
6
+ from ..libs.pum.feedback import Feedback
7
+ from ..libs.pum.pum_config import PumConfig
8
+ from ..libs.pum.upgrader import Upgrader
9
+ from ..utils.plugin_utils import logger
10
+
11
+
12
class ModuleOperationCanceled(Exception):
    """Raised when a running module operation is canceled."""
14
+
15
+
16
class ModuleOperationTask(QThread):
    """
    Background task for running module install/upgrade/uninstall operations.
    This allows the UI to remain responsive and show progress during long operations.

    Usage: call one of the ``start_*`` methods; progress is reported through
    ``signalProgress`` and completion through ``signalFinished``.
    """

    signalProgress = pyqtSignal(str, int, int)  # message, current, total
    signalFinished = pyqtSignal(bool, str)  # success, error_message

    def __init__(self, parent=None):
        super().__init__(parent)

        # Operation context, populated by the start_*() methods.
        self.__pum_config = None
        self.__connection = None
        self.__operation = None  # 'install', 'upgrade', 'uninstall' or 'roles'
        self.__parameters = None
        self.__options = {}

        # Runtime state.
        self.__feedback = None
        self.__canceled = False
        self.__error_message = None

    def __start_operation(
        self,
        operation: str,
        pum_config: PumConfig,
        connection: psycopg.Connection,
        parameters: dict,
        options: dict,
    ):
        """Common launcher: store the operation context, reset state and start the thread.

        Extracted to remove the fourfold copy-paste across the public start_*() methods.
        """
        self.__pum_config = pum_config
        self.__connection = connection
        self.__operation = operation
        self.__parameters = parameters
        self.__options = options
        self.__canceled = False
        self.__error_message = None
        self.start()

    def start_install(
        self, pum_config: PumConfig, connection: psycopg.Connection, parameters: dict, **options
    ):
        """Start an install operation."""
        self.__start_operation("install", pum_config, connection, parameters, options)

    def start_upgrade(
        self, pum_config: PumConfig, connection: psycopg.Connection, parameters: dict, **options
    ):
        """Start an upgrade operation."""
        self.__start_operation("upgrade", pum_config, connection, parameters, options)

    def start_uninstall(
        self, pum_config: PumConfig, connection: psycopg.Connection, parameters: dict, **options
    ):
        """Start an uninstall operation."""
        self.__start_operation("uninstall", pum_config, connection, parameters, options)

    def start_roles(
        self, pum_config: PumConfig, connection: psycopg.Connection, parameters: dict, **options
    ):
        """Start a create and grant roles operation."""
        self.__start_operation("roles", pum_config, connection, parameters, options)

    def cancel(self):
        """Cancel the current operation."""
        self.__canceled = True
        if self.__feedback:
            self.__feedback.cancel()

    def is_canceled(self) -> bool:
        """Return True once cancel() has been requested."""
        return self.__canceled

    def run(self):
        """Execute the operation in a background thread.

        Commits on success (unless the 'commit' option is False or the task was
        canceled); rolls back and reports the error on failure.
        """
        try:
            # Create feedback instance that bridges pum progress to Qt signals.
            self.__feedback = self._create_feedback()

            upgrader = Upgrader(config=self.__pum_config)

            if self.__operation == "install":
                self._run_install(upgrader)
            elif self.__operation == "upgrade":
                self._run_upgrade(upgrader)
            elif self.__operation == "uninstall":
                self._run_uninstall(upgrader)
            elif self.__operation == "roles":
                self._run_roles()
            else:
                raise Exception(f"Unknown operation: {self.__operation}")

            # Commit if successful and not canceled.
            if not self.__canceled and self.__options.get("commit", True):
                logger.info("Committing changes to database...")
                self.__connection.commit()
                logger.info("Changes committed to the database.")

            logger.info(f"Operation '{self.__operation}' completed successfully")
            self.signalFinished.emit(True, "")

        except Exception as e:
            logger.critical(f"Module operation error in '{self.__operation}': {e}")
            logger.exception("Full traceback:")  # Log full stack trace
            self.__error_message = str(e)
            # Roll back so the database is left untouched after a failure.
            try:
                logger.info("Rolling back transaction...")
                self.__connection.rollback()
                logger.info("Transaction rolled back")
            except Exception as rollback_error:
                logger.error(f"Rollback failed: {rollback_error}")
            self.signalFinished.emit(False, self.__error_message)

    def _run_install(self, upgrader: Upgrader):
        """Run install operation, optionally followed by demo-data installation."""
        # Extract options that should not be passed through to install().
        install_demo_data = self.__options.pop("install_demo_data", False)
        demo_data_name = self.__options.pop("demo_data_name", None)

        upgrader.install(
            connection=self.__connection,
            parameters=self.__parameters,
            feedback=self.__feedback,
            commit=False,  # run() decides whether to commit
            **self.__options,
        )

        # Install demo data if requested.
        if install_demo_data and demo_data_name:
            upgrader.install_demo_data(
                connection=self.__connection,
                name=demo_data_name,
                parameters=self.__parameters,
            )

    def _run_upgrade(self, upgrader: Upgrader):
        """Run upgrade operation."""
        upgrader.upgrade(
            connection=self.__connection,
            parameters=self.__parameters,
            feedback=self.__feedback,
            **self.__options,
        )

    def _run_uninstall(self, upgrader: Upgrader):
        """Run uninstall operation."""
        logger.info("Starting uninstall operation...")
        logger.debug(f"Parameters: {self.__parameters}")
        logger.debug(f"Options: {self.__options}")

        upgrader.uninstall(
            connection=self.__connection,
            parameters=self.__parameters,
            feedback=self.__feedback,
            commit=False,  # run() decides whether to commit
        )

        logger.info("Uninstall operation completed")

    def _run_roles(self):
        """Run create and grant roles operation."""
        logger.info("Starting create and grant roles operation...")

        role_manager = self.__pum_config.role_manager()

        if not role_manager.roles:
            logger.warning("No roles defined in the configuration")
            return

        # Create roles with grant=True to also grant permissions.
        role_manager.create_roles(
            connection=self.__connection,
            grant=True,
            feedback=self.__feedback,
        )

        logger.info("Create and grant roles operation completed")

    def _create_feedback(self):
        """Create a Feedback instance that emits Qt signals."""

        class QtFeedback(Feedback):
            """Feedback implementation that emits Qt signals."""

            def __init__(self, task):
                super().__init__()
                self.task = task

            def report_progress(self, message: str, current: int = 0, total: int = 0):
                """Report progress via Qt signal.

                If current and total are provided (non-zero), use those.
                Otherwise, use the internal step counter.
                """
                if current > 0 or total > 0:
                    actual_current = current
                    actual_total = total
                else:
                    actual_current, actual_total = self.get_progress()

                logger.info(
                    f"[{actual_current}/{actual_total}] {message}" if actual_total > 0 else message
                )
                self.task.signalProgress.emit(message, actual_current, actual_total)

            def is_cancelled(self):
                """Check if operation is cancelled."""
                # Use the public accessor instead of reaching through the
                # name-mangled private attribute.
                return self.task.is_canceled()

        return QtFeedback(self)
@@ -23,6 +23,7 @@ class ModulePackage:
23
23
  type=Type.RELEASE,
24
24
  name=None,
25
25
  branch=None,
26
+ commit_sha=None,
26
27
  ):
27
28
  self.module = module
28
29
  self.organisation = organisation
@@ -30,6 +31,7 @@ class ModulePackage:
30
31
  self.type = type
31
32
  self.name = name
32
33
  self.branch = branch
34
+ self.commit_sha = commit_sha
33
35
  self.created_at = None
34
36
  self.prerelease = False
35
37
  self.html_url = None
@@ -63,6 +65,26 @@ class ModulePackage:
63
65
 
64
66
  return self.name
65
67
 
68
+ def fetch_commit_sha(self):
69
+ """Fetch the latest commit SHA for the branch from GitHub API."""
70
+ if self.type not in (ModulePackage.Type.BRANCH, ModulePackage.Type.PULL_REQUEST):
71
+ return
72
+
73
+ try:
74
+ # For branches: use refs/heads/{branch}
75
+ # For PRs: use refs/heads/{branch} from the head repo
76
+ url = f"https://api.github.com/repos/{self.organisation}/{self.repository}/commits/{self.branch}"
77
+ r = requests.get(url, headers=PluginUtils.get_github_headers(), timeout=10)
78
+ r.raise_for_status()
79
+ commit_data = r.json()
80
+ self.commit_sha = commit_data["sha"]
81
+ except Exception as e:
82
+ # If we can't fetch the commit SHA, we'll fall back to not caching
83
+ from ..utils.plugin_utils import logger
84
+
85
+ logger.warning(f"Failed to fetch commit SHA for branch '{self.branch}': {e}")
86
+ self.commit_sha = None
87
+
66
88
  def __parse_release(self, json_payload: dict):
67
89
  if self.name is None:
68
90
  self.name = json_payload["name"]
@@ -75,17 +97,11 @@ class ModulePackage:
75
97
  self.prerelease = json_payload["prerelease"]
76
98
  self.html_url = json_payload["html_url"]
77
99
 
78
- self.__parse_release_assets(json_payload["assets_url"])
79
-
80
- def __parse_release_assets(self, assets_url: str):
81
-
82
- # Load assets
83
- r = requests.get(assets_url, headers=PluginUtils.get_github_headers())
84
-
85
- # Raise an exception in case of http errors
86
- r.raise_for_status()
100
+ # Use assets directly from the release payload (already included in releases API response)
101
+ self.__parse_release_assets(json_payload.get("assets", []))
87
102
 
88
- json_assets = r.json()
103
+ def __parse_release_assets(self, json_assets: list):
104
+ """Parse release assets from the already-fetched release data."""
89
105
  for json_asset in json_assets:
90
106
  asset = ModuleAsset(
91
107
  name=json_asset["name"],
@@ -113,6 +129,7 @@ class ModulePackage:
113
129
  if self.name is None:
114
130
  self.name = f"#{json_payload['number']} {json_payload['title']}"
115
131
  self.branch = json_payload["head"]["ref"]
132
+ self.commit_sha = json_payload["head"]["sha"]
116
133
  self.created_at = QDateTime.fromString(json_payload["created_at"], Qt.DateFormat.ISODate)
117
134
  self.prerelease = False
118
135
  self.html_url = json_payload["html_url"]
@@ -3,8 +3,10 @@ from pydantic import BaseModel
3
3
 
4
4
class ModuleConfig(BaseModel):
    """Configuration entry describing a single module and its GitHub location."""

    name: str
    id: str
    organisation: str
    repository: str
    exclude_releases: str | None = None  # Regexp pattern to exclude releases
8
10
 
9
11
 
10
12
  class ModulesConfig(BaseModel):