nefino-geosync 0.2.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- nefino_geosync/__init__.py +0 -0
- nefino_geosync/access_rule_filter.py +11 -0
- nefino_geosync/api_client.py +119 -0
- nefino_geosync/compose_requests.py +134 -0
- nefino_geosync/config.py +82 -0
- nefino_geosync/download_analysis.py +131 -0
- nefino_geosync/download_completed_analyses.py +21 -0
- nefino_geosync/get_downloadable_analyses.py +48 -0
- nefino_geosync/graphql_errors.py +73 -0
- nefino_geosync/journal.py +199 -0
- nefino_geosync/layer_changelog.py +240 -0
- nefino_geosync/parse_args.py +15 -0
- nefino_geosync/run.py +59 -0
- nefino_geosync/schema.json +2262 -0
- nefino_geosync/schema.py +356 -0
- nefino_geosync/session_logger.py +112 -0
- nefino_geosync/start_analyses.py +57 -0
- nefino_geosync/storage.py +40 -0
- nefino_geosync-0.2.3.dist-info/METADATA +271 -0
- nefino_geosync-0.2.3.dist-info/RECORD +23 -0
- nefino_geosync-0.2.3.dist-info/WHEEL +4 -0
- nefino_geosync-0.2.3.dist-info/entry_points.txt +2 -0
- nefino_geosync-0.2.3.dist-info/licenses/LICENSE +201 -0
nefino_geosync/schema.py
ADDED
|
@@ -0,0 +1,356 @@
|
|
|
1
|
+
import sgqlc.types
|
|
2
|
+
import sgqlc.types.datetime
|
|
3
|
+
|
|
4
|
+
schema = sgqlc.types.Schema()


########################################################################
# Scalars and Enumerations
########################################################################
# Aliases for sgqlc's built-in scalar types, bound into this module's
# namespace so the generated type definitions below can reference them.
Boolean = sgqlc.types.Boolean


class CRSType(sgqlc.types.Enum):
    # Coordinate reference systems (EPSG codes) accepted for analysis output;
    # see GeoAnalysisOutputFormatInput.crs.
    __schema__ = schema
    __choices__ = ('EPSG_25832', 'EPSG_25833', 'EPSG_3035', 'EPSG_4326')


class LayerChangeAction(sgqlc.types.Enum):
    # Kinds of change a layer can undergo, as reported in the changelog.
    __schema__ = schema
    __choices__ = ('created', 'updated', 'deleted', 'regions_changed')


DateTime = sgqlc.types.datetime.DateTime

Float = sgqlc.types.Float

ID = sgqlc.types.ID

Int = sgqlc.types.Int


class OutputObjectType(sgqlc.types.Enum):
    # File formats an analysis can be exported as.
    __schema__ = schema
    __choices__ = ('GPKG', 'QGIS_AND_GPKG', 'QGIS_PRJ', 'SHP')


class PlaceTypeGeo(sgqlc.types.Enum):
    # Place categories used on the geo side of the API
    # (e.g. the placeType argument of Query.regional_layers).
    __schema__ = schema
    __choices__ = (
        'ADMINISTRATIVE_UNIT_GEO',
        'COUNTRY',
        'FEDERAL_STATE_GEO',
        'LOCAL_ADMINISTRATIVE_UNITS_GEO',
        'PLANNING_REGIONS_GEO',
    )


class PlaceTypeNews(sgqlc.types.Enum):
    # Place categories used on the news side of the API
    # (see PlaceIdentifier.place_type).
    __schema__ = schema
    __choices__ = (
        'ADMINISTRATIVE_UNIT',
        'COUNTRY',
        'COUNTY',
        'FEDERAL_STATE',
        'LOCAL_ADMINISTRATIVE_UNITS',
        'PLANNING_REGIONS',
    )


class ScopeType(sgqlc.types.Enum):
    # Ways an analysis area can be delimited (see GeoAnalysisScopeInput).
    __schema__ = schema
    __choices__ = (
        'ADMINISTRATIVE_UNIT',
        'FEDERAL_STATE',
        'LOCAL_ADMINISTRATIVE_UNIT',
        'PLANNING_REGION',
        'POLYGON',
        'RADIUS',
        'SQUARE',
    )


class Status(sgqlc.types.Enum):
    # Lifecycle states of a started analysis (see MinimalAnalysis.status).
    __schema__ = schema
    __choices__ = ('ERROR', 'PENDING', 'RUNNING', 'SUCCESS')


String = sgqlc.types.String


class UUID(sgqlc.types.Scalar):
    # Custom scalar; sgqlc passes values through unconverted.
    __schema__ = schema
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
########################################################################
|
|
86
|
+
# Input Objects
|
|
87
|
+
########################################################################
|
|
88
|
+
class CoordinateInput(sgqlc.types.Input):
    # A lon/lat coordinate pair (float degrees).
    __schema__ = schema
    __field_names__ = ('lon', 'lat')
    lon = sgqlc.types.Field(sgqlc.types.non_null(Float), graphql_name='lon')
    lat = sgqlc.types.Field(sgqlc.types.non_null(Float), graphql_name='lat')


class GeoAnalysisInput(sgqlc.types.Input):
    # Top-level payload for the startAnalysis mutation: an optional
    # display name plus the full analysis specification.
    __schema__ = schema
    __field_names__ = ('name', 'specs')
    name = sgqlc.types.Field(String, graphql_name='name')
    specs = sgqlc.types.Field(sgqlc.types.non_null('GeoAnalysisObjectInput'), graphql_name='specs')


class GeoAnalysisLayerInput(sgqlc.types.Input):
    # One layer to analyze, with one or more buffer distances (meters).
    __schema__ = schema
    __field_names__ = ('layer_name', 'buffer_m')
    layer_name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name='layerName')
    buffer_m = sgqlc.types.Field(
        sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(Int))),
        graphql_name='bufferM',
    )


class GeoAnalysisObjectInput(sgqlc.types.Input):
    # Full analysis specification: where (coordinate + scope), what
    # (requests + operations), and how to export it (output).
    __schema__ = schema
    __field_names__ = ('coordinate', 'scope', 'requests', 'operations', 'output')
    coordinate = sgqlc.types.Field(sgqlc.types.non_null(CoordinateInput), graphql_name='coordinate')
    scope = sgqlc.types.Field(sgqlc.types.non_null('GeoAnalysisScopeInput'), graphql_name='scope')
    requests = sgqlc.types.Field(
        sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null('GeoAnalysisRequestInput'))),
        graphql_name='requests',
    )
    operations = sgqlc.types.Field(
        sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null('GeoAnalysisOperationInput'))),
        graphql_name='operations',
    )
    output = sgqlc.types.Field(sgqlc.types.non_null('GeoAnalysisOutputFormatInput'), graphql_name='output')


class GeoAnalysisOperationInput(sgqlc.types.Input):
    # A named server-side operation and its string-encoded inputs.
    __schema__ = schema
    __field_names__ = ('operation_name', 'input')
    operation_name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name='operationName')
    input = sgqlc.types.Field(sgqlc.types.non_null(sgqlc.types.list_of(String)), graphql_name='input')


class GeoAnalysisOutputFormatInput(sgqlc.types.Input):
    # Desired export template, file type, and coordinate reference system.
    __schema__ = schema
    __field_names__ = ('template_name', 'type', 'crs')
    template_name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name='templateName')
    type = sgqlc.types.Field(sgqlc.types.non_null(OutputObjectType), graphql_name='type')
    crs = sgqlc.types.Field(sgqlc.types.non_null(CRSType), graphql_name='crs')


class GeoAnalysisRequestInput(sgqlc.types.Input):
    # The set of layers requested from a single cluster.
    __schema__ = schema
    __field_names__ = ('cluster_name', 'layers')
    cluster_name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name='clusterName')
    layers = sgqlc.types.Field(
        sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(GeoAnalysisLayerInput))),
        graphql_name='layers',
    )


class GeoAnalysisScopeInput(sgqlc.types.Input):
    # Area delimitation for an analysis. Which of the optional fields
    # applies presumably depends on `type` (e.g. RADIUS -> radius,
    # POLYGON -> polygon) — TODO confirm against the server schema.
    __schema__ = schema
    __field_names__ = ('type', 'radius', 'sides', 'polygon', 'place')
    type = sgqlc.types.Field(sgqlc.types.non_null(ScopeType), graphql_name='type')
    radius = sgqlc.types.Field(Float, graphql_name='radius')
    sides = sgqlc.types.Field(Float, graphql_name='sides')
    polygon = sgqlc.types.Field(String, graphql_name='polygon')
    place = sgqlc.types.Field(String, graphql_name='place')


class LayerChangelogInput(sgqlc.types.Input):
    # Optional filters for the layerChangelog query; every field may be
    # omitted.
    __schema__ = schema
    __field_names__ = (
        'layer_id',
        'cluster_id',
        'layer_name',
        'cluster_name',
        'timestamp_start',
        'timestamp_end',
        'changed_field',
        'action',
    )
    layer_id = sgqlc.types.Field(Int, graphql_name='layerId')
    cluster_id = sgqlc.types.Field(Int, graphql_name='clusterId')
    layer_name = sgqlc.types.Field(String, graphql_name='layerName')
    cluster_name = sgqlc.types.Field(String, graphql_name='clusterName')
    timestamp_start = sgqlc.types.Field(DateTime, graphql_name='timestampStart')
    timestamp_end = sgqlc.types.Field(DateTime, graphql_name='timestampEnd')
    changed_field = sgqlc.types.Field(String, graphql_name='changedField')
    action = sgqlc.types.Field(LayerChangeAction, graphql_name='action')
|
|
183
|
+
|
|
184
|
+
|
|
185
|
+
########################################################################
|
|
186
|
+
# Output Objects and Interfaces
|
|
187
|
+
########################################################################
|
|
188
|
+
class MinimalAnalysis(sgqlc.types.Type):
    # A started analysis: identity, status, and a download URL
    # (URL is nullable — presumably only set once the run succeeded;
    # TODO confirm with the server).
    __schema__ = schema
    __field_names__ = ('started_at', 'pk', 'status', 'url')
    started_at = sgqlc.types.Field(DateTime, graphql_name='startedAt')
    pk = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name='pk')
    status = sgqlc.types.Field(sgqlc.types.non_null(Status), graphql_name='status')
    url = sgqlc.types.Field(String, graphql_name='url')


class MinimalAreasEnabled(sgqlc.types.Type):
    # Which areas the account may run analyses in: either everything,
    # or an explicit list of enabled states.
    __schema__ = schema
    __field_names__ = ('all_areas_enabled', 'enabled_states')
    all_areas_enabled = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name='allAreasEnabled')
    enabled_states = sgqlc.types.Field(sgqlc.types.list_of('PlaceIdentifier'), graphql_name='enabledStates')


class MinimalCluster(sgqlc.types.Type):
    # A named group of layers plus whether the account can access it.
    __schema__ = schema
    __field_names__ = ('name', 'layers', 'has_access')
    name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name='name')
    layers = sgqlc.types.Field(
        sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null('MinimalLayer'))),
        graphql_name='layers',
    )
    has_access = sgqlc.types.Field(Boolean, graphql_name='hasAccess')


class MinimalGeoAccessRule(sgqlc.types.Type):
    # An access rule: either all clusters, or specific clusters/places.
    __schema__ = schema
    __field_names__ = ('all_clusters_enabled', 'clusters', 'places')
    all_clusters_enabled = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name='allClustersEnabled')
    clusters = sgqlc.types.Field(sgqlc.types.list_of(String), graphql_name='clusters')
    places = sgqlc.types.Field(sgqlc.types.list_of(String), graphql_name='places')


class MinimalLayer(sgqlc.types.Type):
    # A single data layer and its update metadata.
    __schema__ = schema
    __field_names__ = ('name', 'pre_buffer', 'last_update', 'is_regional')
    name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name='name')
    pre_buffer = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name='preBuffer')
    last_update = sgqlc.types.Field(DateTime, graphql_name='lastUpdate')
    is_regional = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name='isRegional')


class LayerChangelogEntry(sgqlc.types.Type):
    # One changelog record describing how a layer changed and when.
    __schema__ = schema
    __field_names__ = (
        'layer_name',
        'timestamp',
        'action',
        'changed_fields',
        'attributes',
        'layer_id',
        'last_update',
        'cluster_name',
        'cluster_id',
    )
    layer_name = sgqlc.types.Field(String, graphql_name='layerName')
    timestamp = sgqlc.types.Field(DateTime, graphql_name='timestamp')
    action = sgqlc.types.Field(LayerChangeAction, graphql_name='action')
    changed_fields = sgqlc.types.Field(sgqlc.types.list_of(String), graphql_name='changedFields')
    attributes = sgqlc.types.Field(sgqlc.types.list_of(String), graphql_name='attributes')
    layer_id = sgqlc.types.Field(Int, graphql_name='layerId')
    last_update = sgqlc.types.Field(DateTime, graphql_name='lastUpdate')
    cluster_name = sgqlc.types.Field(String, graphql_name='clusterName')
    cluster_id = sgqlc.types.Field(Int, graphql_name='clusterId')


class Mutation(sgqlc.types.Type):
    # Root mutation type: startAnalysis(inputs: GeoAnalysisInput!): MinimalAnalysis
    __schema__ = schema
    __field_names__ = ('start_analysis',)
    start_analysis = sgqlc.types.Field(
        MinimalAnalysis,
        graphql_name='startAnalysis',
        args=sgqlc.types.ArgDict(
            (
                (
                    'inputs',
                    sgqlc.types.Arg(
                        sgqlc.types.non_null(GeoAnalysisInput),
                        graphql_name='inputs',
                        default=None,
                    ),
                ),
            )
        ),
    )


class PlaceIdentifier(sgqlc.types.Type):
    # A place reference: id plus its category.
    __schema__ = schema
    __field_names__ = ('place_id', 'place_type')
    place_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name='placeId')
    place_type = sgqlc.types.Field(sgqlc.types.non_null(PlaceTypeNews), graphql_name='placeType')


class Query(sgqlc.types.Type):
    # Root query type.
    __schema__ = schema
    __field_names__ = (
        'analysis_metadata',
        'allowed_analysis_areas',
        'clusters',
        'regional_layers',
        'access_rules',
        'layer_changelog',
    )
    # analysisMetadata(analysisId: UUID): [MinimalAnalysis]
    analysis_metadata = sgqlc.types.Field(
        sgqlc.types.list_of(MinimalAnalysis),
        graphql_name='analysisMetadata',
        args=sgqlc.types.ArgDict(
            (
                (
                    'analysis_id',
                    sgqlc.types.Arg(UUID, graphql_name='analysisId', default=None),
                ),
            )
        ),
    )
    allowed_analysis_areas = sgqlc.types.Field(MinimalAreasEnabled, graphql_name='allowedAnalysisAreas')
    clusters = sgqlc.types.Field(sgqlc.types.list_of(MinimalCluster), graphql_name='clusters')
    # regionalLayers(placeType: PlaceTypeGeo!, placeId: String!): [MinimalLayer]
    regional_layers = sgqlc.types.Field(
        sgqlc.types.list_of(MinimalLayer),
        graphql_name='regionalLayers',
        args=sgqlc.types.ArgDict(
            (
                (
                    'place_type',
                    sgqlc.types.Arg(
                        sgqlc.types.non_null(PlaceTypeGeo),
                        graphql_name='placeType',
                        default=None,
                    ),
                ),
                (
                    'place_id',
                    sgqlc.types.Arg(
                        sgqlc.types.non_null(String),
                        graphql_name='placeId',
                        default=None,
                    ),
                ),
            )
        ),
    )
    access_rules = sgqlc.types.Field(sgqlc.types.list_of(MinimalGeoAccessRule), graphql_name='accessRules')
    # layerChangelog(inputs: LayerChangelogInput): [LayerChangelogEntry]
    layer_changelog = sgqlc.types.Field(
        sgqlc.types.list_of(LayerChangelogEntry),
        graphql_name='layerChangelog',
        args=sgqlc.types.ArgDict(
            (
                (
                    'inputs',
                    sgqlc.types.Arg(LayerChangelogInput, graphql_name='inputs', default=None),
                ),
            )
        ),
    )
|
|
345
|
+
|
|
346
|
+
|
|
347
|
+
########################################################################
# Unions
########################################################################

########################################################################
# Schema Entry Points
########################################################################
schema.query_type = Query
schema.mutation_type = Mutation
# The API exposes no GraphQL subscriptions.
schema.subscription_type = None
|
|
@@ -0,0 +1,112 @@
|
|
|
1
|
+
"""Session logging utility that captures all stdout/stderr to timestamped log files."""
|
|
2
|
+
|
|
3
|
+
import os
|
|
4
|
+
import sys
|
|
5
|
+
from .storage import get_app_directory
|
|
6
|
+
from datetime import datetime
|
|
7
|
+
from typing import TextIO
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class TeeStream:
    """A stream that writes to multiple outputs (console + log file).

    Wraps an existing text stream so that every write is mirrored into a
    shared log file, with a timestamp/stream-name prefix added to
    non-blank content. Unknown attributes are delegated to the wrapped
    stream, so an instance can stand in for ``sys.stdout``/``sys.stderr``.
    """

    def __init__(self, original_stream: TextIO, log_file: TextIO, stream_name: str) -> None:
        self.original_stream = original_stream  # e.g. the real sys.stdout
        self.log_file = log_file  # shared session log file
        self.stream_name = stream_name  # label used in the log prefix, e.g. 'STDOUT'

    def write(self, text: str) -> int:
        """Write *text* to the console and mirror it into the log file.

        Returns the number of characters written, as required by the
        ``TextIO.write`` contract (the original returned ``None``, which
        breaks callers that use the count when this replaces sys.stdout).
        """
        # Write to original stream (console)
        written = self.original_stream.write(text)

        # Write to log file with stream prefix for clarity
        if text.strip():  # Only add prefix for non-empty content
            timestamp = datetime.now().strftime('%H:%M:%S')
            self.log_file.write(f'[{timestamp}] [{self.stream_name}] {text}')
        else:
            self.log_file.write(text)

        # Flush both targets so output survives an abrupt process exit.
        self.original_stream.flush()
        self.log_file.flush()
        # Some file-like objects return None from write(); fall back to len().
        return written if written is not None else len(text)

    def flush(self) -> None:
        """Flush both the console stream and the log file."""
        self.original_stream.flush()
        self.log_file.flush()

    def __getattr__(self, name):
        # Delegate any other attributes (encoding, isatty, fileno, ...)
        # to the original stream.
        return getattr(self.original_stream, name)
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
class SessionLogger:
    """Manages session-wide logging to timestamped files.

    Replaces ``sys.stdout``/``sys.stderr`` with :class:`TeeStream`
    wrappers so all console output is also captured in a log file under
    the app directory. Not thread-safe; intended to be driven once per
    process via the module-level helper functions.
    """

    def __init__(self) -> None:
        self.log_file = None  # open log file handle while logging is active
        self.original_stdout = None  # saved stream, restored by stop_logging()
        self.original_stderr = None
        self.started = False  # guards against double start/stop

    def start_logging(self) -> None:
        """Start capturing stdout and stderr to a timestamped log file."""
        if self.started:
            return  # already capturing; don't wrap the tee in another tee

        # Create timestamped log file name, e.g. nefino-geosync_20240131_120000.log
        timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
        log_filename = f'nefino-geosync_{timestamp}.log'
        log_path = os.path.join(get_app_directory(), log_filename)

        # Open log file
        self.log_file = open(log_path, 'w', encoding='utf-8')

        # Write session header
        self.log_file.write('=== Nefino GeoSync Session Log ===\n')
        self.log_file.write(f'Started: {datetime.now().isoformat()}\n')
        self.log_file.write(f'Log file: {log_path}\n')
        self.log_file.write('=' * 50 + '\n\n')

        # Store original streams so stop_logging() can restore them
        self.original_stdout = sys.stdout
        self.original_stderr = sys.stderr

        # Replace with tee streams
        sys.stdout = TeeStream(self.original_stdout, self.log_file, 'STDOUT')
        sys.stderr = TeeStream(self.original_stderr, self.log_file, 'STDERR')

        self.started = True
        print(f'Session logging started: {log_path}')

    def stop_logging(self) -> None:
        """Stop logging and restore original streams."""
        if not self.started:
            return

        # Still goes through the tee, so it is the last captured line.
        print('Session logging stopped.')

        # Restore original streams and drop the stale references so a
        # later start_logging() cannot restore already-replaced streams.
        sys.stdout = self.original_stdout
        sys.stderr = self.original_stderr
        self.original_stdout = None
        self.original_stderr = None

        # Write session footer and close log file
        if self.log_file:
            self.log_file.write('\n' + '=' * 50 + '\n')
            self.log_file.write(f'Session ended: {datetime.now().isoformat()}\n')
            self.log_file.close()
            # Original kept the closed handle around; clear it so nothing
            # can accidentally write to a closed file.
            self.log_file = None

        self.started = False
|
|
99
|
+
|
|
100
|
+
|
|
101
|
+
# Global session logger instance shared by the process; callers use the
# two module-level functions below instead of constructing their own.
_session_logger = SessionLogger()


def start_session_logging() -> None:
    """Start session-wide logging (idempotent; delegates to the singleton)."""
    _session_logger.start_logging()


def stop_session_logging() -> None:
    """Stop session-wide logging (no-op if logging was never started)."""
    _session_logger.stop_logging()
|
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
import sys
|
|
2
|
+
from .api_client import (
|
|
3
|
+
general_availability_operation,
|
|
4
|
+
local_availability_operation,
|
|
5
|
+
start_analyses_operation,
|
|
6
|
+
)
|
|
7
|
+
from .compose_requests import compose_complete_requests
|
|
8
|
+
from .download_completed_analyses import download_completed_analyses
|
|
9
|
+
from .graphql_errors import check_errors
|
|
10
|
+
from .journal import Journal
|
|
11
|
+
from .layer_changelog import LayerChangelogResult
|
|
12
|
+
from .parse_args import parse_args
|
|
13
|
+
from sgqlc.endpoint.http import HTTPEndpoint
|
|
14
|
+
from typing import Any
|
|
15
|
+
|
|
16
|
+
# sgqlc operation results have no precise static type; alias for readability.
AnalysesMutationResult = Any


def start_analyses(client: HTTPEndpoint, changelog_result: LayerChangelogResult = None) -> AnalysesMutationResult:
    """Starts the analyses for all updated data.

    Args:
        client: Authenticated sgqlc HTTP endpoint used for all GraphQL calls.
        changelog_result: Changelog data forwarded to request composition;
            defaults to None (annotation should arguably be Optional).

    Returns:
        The sgqlc result of the last startAnalysis mutation.

    Side effects: records requested analyses in the journal, downloads
    completed analyses, and exits the process (code 0) when there is
    nothing to update.
    """
    journal = Journal.singleton()
    parse_args()
    # Get information about our permissions and the general availability of layers
    general_op = general_availability_operation()
    print('Checking for layers to update...')
    general_data = client(general_op)
    check_errors(general_data, 'Failed to fetch general layer availability')
    # sgqlc idiom: op + raw-data interprets the JSON into typed objects.
    general_availability = general_op + general_data

    # Get information about the availability of layers in specific areas
    local_op = local_availability_operation(general_availability)
    local_data = client(local_op)
    check_errors(local_data, 'Failed to fetch regional layer availability')
    local_availability = local_op + local_data
    # Clear previous analysis requested layers
    journal.clear_analysis_requested_layers()
    # Start the analyses
    analysis_inputs = compose_complete_requests(general_availability, local_availability, changelog_result)
    if len(analysis_inputs) == 0:
        # We can only check for layer having been unpacked already.
        # So if we're here, we've already unpacked all latest layers.
        print('✅ No layers to update. Done.')
        sys.exit(0)
    # One mutation per federal-state key, each carrying only that key's inputs.
    for federal_state_key in analysis_inputs:
        print(f'Starting analysis for {federal_state_key}')
        analyses_op = start_analyses_operation({federal_state_key: analysis_inputs[federal_state_key]})
        print(f'Started analysis for {federal_state_key}. Waiting for completion...')
        analyses_data = client(analyses_op)
        check_errors(analyses_data, f'Failed to start analysis for {federal_state_key}')
        analyses = analyses_op + analyses_data

        # Add the analyses to the journal
        # NOTE(review): this passes the FULL analysis_inputs dict, not just
        # this state's slice — confirm record_analyses_requested expects that.
        journal.record_analyses_requested(analyses, analysis_inputs)
        print(f'Analysis for {federal_state_key} finished')
    download_completed_analyses(client)

    # Result of the last started analysis (loop is non-empty here).
    return analyses
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
"""This module handles creating and finding directories where the app should store its files."""
|
|
2
|
+
|
|
3
|
+
import os
|
|
4
|
+
import platform
|
|
5
|
+
|
|
6
|
+
APP_NAME = 'nefino-geosync'


def get_app_directory() -> str:
    """Returns the directory where the app should store its files.

    Creates it (including parents) if it doesn't exist.

    Returns:
        Absolute path of the platform-appropriate per-user app directory.
    """
    system = platform.system()

    if system == 'Windows':
        # %APPDATA% can be unset in stripped-down environments; fall back
        # to the home directory so os.path.join never receives None.
        appdata = os.getenv('APPDATA') or os.path.expanduser('~')
        base_dir = os.path.join(appdata, APP_NAME)
    elif system == 'Darwin':  # macOS
        base_dir = os.path.join(os.path.expanduser('~'), 'Library', 'Application Support', APP_NAME)
    else:  # Linux and other Unix-like systems
        base_dir = os.path.join(os.path.expanduser('~'), f'.{APP_NAME}')

    # exist_ok avoids the TOCTOU race between an exists() check and makedirs().
    os.makedirs(base_dir, exist_ok=True)

    return base_dir
|
|
24
|
+
|
|
25
|
+
def get_download_directory(pk: str) -> str:
    """Returns the directory where the app should store downloaded analyses.

    Creates it if it doesn't exist.

    Args:
        pk: Primary key of the analysis; used as the leaf directory name.

    Returns:
        Path of ``<app dir>/downloads/<pk>``.
    """
    downloads_dir = os.path.join(get_app_directory(), 'downloads', pk)

    # exist_ok avoids the TOCTOU race between an exists() check and makedirs().
    os.makedirs(downloads_dir, exist_ok=True)

    return downloads_dir
|
|
34
|
+
|
|
35
|
+
def get_output_path(state: str, cluster: str) -> str:
    """Returns the path to store the latest version of downloaded files.

    Creates the directory if it doesn't exist.

    Args:
        state: Federal-state key; first directory level under newestData.
        cluster: Cluster name; second directory level.

    Returns:
        Path of ``<app dir>/newestData/<state>/<cluster>``.
    """
    output_dir = os.path.join(get_app_directory(), 'newestData', state, cluster)
    # exist_ok avoids the TOCTOU race between an exists() check and makedirs().
    os.makedirs(output_dir, exist_ok=True)
    return output_dir
|