nefino-geosync 0.2.0__py3-none-any.whl → 0.2.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of nefino-geosync might be problematic.

nefino_geosync/schema.py CHANGED
@@ -1,21 +1,25 @@
 import sgqlc.types
 import sgqlc.types.datetime
 
-
 schema = sgqlc.types.Schema()
 
 
-
 ########################################################################
 # Scalars and Enumerations
 ########################################################################
 Boolean = sgqlc.types.Boolean
 
+
 class CRSType(sgqlc.types.Enum):
     __schema__ = schema
     __choices__ = ('EPSG_25832', 'EPSG_25833', 'EPSG_3035', 'EPSG_4326')
 
 
+class LayerChangeAction(sgqlc.types.Enum):
+    __schema__ = schema
+    __choices__ = ('created', 'updated', 'deleted', 'regions_changed')
+
+
 DateTime = sgqlc.types.datetime.DateTime
 
 Float = sgqlc.types.Float
@@ -24,6 +28,7 @@ ID = sgqlc.types.ID
 
 Int = sgqlc.types.Int
 
+
 class OutputObjectType(sgqlc.types.Enum):
     __schema__ = schema
     __choices__ = ('GPKG', 'QGIS_AND_GPKG', 'QGIS_PRJ', 'SHP')
@@ -31,17 +36,38 @@ class OutputObjectType(sgqlc.types.Enum):
 
 class PlaceTypeGeo(sgqlc.types.Enum):
     __schema__ = schema
-    __choices__ = ('ADMINISTRATIVE_UNIT_GEO', 'COUNTRY', 'FEDERAL_STATE_GEO', 'LOCAL_ADMINISTRATIVE_UNITS_GEO', 'PLANNING_REGIONS_GEO')
+    __choices__ = (
+        'ADMINISTRATIVE_UNIT_GEO',
+        'COUNTRY',
+        'FEDERAL_STATE_GEO',
+        'LOCAL_ADMINISTRATIVE_UNITS_GEO',
+        'PLANNING_REGIONS_GEO',
+    )
 
 
 class PlaceTypeNews(sgqlc.types.Enum):
     __schema__ = schema
-    __choices__ = ('ADMINISTRATIVE_UNIT', 'COUNTRY', 'COUNTY', 'FEDERAL_STATE', 'LOCAL_ADMINISTRATIVE_UNITS', 'PLANNING_REGIONS')
+    __choices__ = (
+        'ADMINISTRATIVE_UNIT',
+        'COUNTRY',
+        'COUNTY',
+        'FEDERAL_STATE',
+        'LOCAL_ADMINISTRATIVE_UNITS',
+        'PLANNING_REGIONS',
+    )
 
 
 class ScopeType(sgqlc.types.Enum):
     __schema__ = schema
-    __choices__ = ('ADMINISTRATIVE_UNIT', 'FEDERAL_STATE', 'LOCAL_ADMINISTRATIVE_UNIT', 'PLANNING_REGION', 'POLYGON', 'RADIUS', 'SQUARE')
+    __choices__ = (
+        'ADMINISTRATIVE_UNIT',
+        'FEDERAL_STATE',
+        'LOCAL_ADMINISTRATIVE_UNIT',
+        'PLANNING_REGION',
+        'POLYGON',
+        'RADIUS',
+        'SQUARE',
+    )
 
 
 class Status(sgqlc.types.Enum):
@@ -51,11 +77,11 @@ class Status(sgqlc.types.Enum):
 
 String = sgqlc.types.String
 
+
 class UUID(sgqlc.types.Scalar):
     __schema__ = schema
 
 
-
 ########################################################################
 # Input Objects
 ########################################################################
@@ -77,7 +103,10 @@ class GeoAnalysisLayerInput(sgqlc.types.Input):
     __schema__ = schema
     __field_names__ = ('layer_name', 'buffer_m')
     layer_name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name='layerName')
-    buffer_m = sgqlc.types.Field(sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(Int))), graphql_name='bufferM')
+    buffer_m = sgqlc.types.Field(
+        sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(Int))),
+        graphql_name='bufferM',
+    )
 
 
 class GeoAnalysisObjectInput(sgqlc.types.Input):
@@ -85,8 +114,14 @@ class GeoAnalysisObjectInput(sgqlc.types.Input):
     __field_names__ = ('coordinate', 'scope', 'requests', 'operations', 'output')
     coordinate = sgqlc.types.Field(sgqlc.types.non_null(CoordinateInput), graphql_name='coordinate')
     scope = sgqlc.types.Field(sgqlc.types.non_null('GeoAnalysisScopeInput'), graphql_name='scope')
-    requests = sgqlc.types.Field(sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null('GeoAnalysisRequestInput'))), graphql_name='requests')
-    operations = sgqlc.types.Field(sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null('GeoAnalysisOperationInput'))), graphql_name='operations')
+    requests = sgqlc.types.Field(
+        sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null('GeoAnalysisRequestInput'))),
+        graphql_name='requests',
+    )
+    operations = sgqlc.types.Field(
+        sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null('GeoAnalysisOperationInput'))),
+        graphql_name='operations',
+    )
     output = sgqlc.types.Field(sgqlc.types.non_null('GeoAnalysisOutputFormatInput'), graphql_name='output')
 
 
@@ -109,7 +144,10 @@ class GeoAnalysisRequestInput(sgqlc.types.Input):
     __schema__ = schema
     __field_names__ = ('cluster_name', 'layers')
     cluster_name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name='clusterName')
-    layers = sgqlc.types.Field(sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(GeoAnalysisLayerInput))), graphql_name='layers')
+    layers = sgqlc.types.Field(
+        sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(GeoAnalysisLayerInput))),
+        graphql_name='layers',
+    )
 
 
 class GeoAnalysisScopeInput(sgqlc.types.Input):
@@ -122,6 +160,27 @@ class GeoAnalysisScopeInput(sgqlc.types.Input):
     place = sgqlc.types.Field(String, graphql_name='place')
 
 
+class LayerChangelogInput(sgqlc.types.Input):
+    __schema__ = schema
+    __field_names__ = (
+        'layer_id',
+        'cluster_id',
+        'layer_name',
+        'cluster_name',
+        'timestamp_start',
+        'timestamp_end',
+        'changed_field',
+        'action',
+    )
+    layer_id = sgqlc.types.Field(Int, graphql_name='layerId')
+    cluster_id = sgqlc.types.Field(Int, graphql_name='clusterId')
+    layer_name = sgqlc.types.Field(String, graphql_name='layerName')
+    cluster_name = sgqlc.types.Field(String, graphql_name='clusterName')
+    timestamp_start = sgqlc.types.Field(DateTime, graphql_name='timestampStart')
+    timestamp_end = sgqlc.types.Field(DateTime, graphql_name='timestampEnd')
+    changed_field = sgqlc.types.Field(String, graphql_name='changedField')
+    action = sgqlc.types.Field(LayerChangeAction, graphql_name='action')
+
 
 ########################################################################
 # Output Objects and Interfaces
@@ -146,7 +205,10 @@ class MinimalCluster(sgqlc.types.Type):
     __schema__ = schema
     __field_names__ = ('name', 'layers', 'has_access')
     name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name='name')
-    layers = sgqlc.types.Field(sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null('MinimalLayer'))), graphql_name='layers')
+    layers = sgqlc.types.Field(
+        sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null('MinimalLayer'))),
+        graphql_name='layers',
+    )
     has_access = sgqlc.types.Field(Boolean, graphql_name='hasAccess')
 
 
@@ -167,12 +229,48 @@ class MinimalLayer(sgqlc.types.Type):
     is_regional = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name='isRegional')
 
 
+class LayerChangelogEntry(sgqlc.types.Type):
+    __schema__ = schema
+    __field_names__ = (
+        'layer_name',
+        'timestamp',
+        'action',
+        'changed_fields',
+        'attributes',
+        'layer_id',
+        'last_update',
+        'cluster_name',
+        'cluster_id',
+    )
+    layer_name = sgqlc.types.Field(String, graphql_name='layerName')
+    timestamp = sgqlc.types.Field(DateTime, graphql_name='timestamp')
+    action = sgqlc.types.Field(LayerChangeAction, graphql_name='action')
+    changed_fields = sgqlc.types.Field(sgqlc.types.list_of(String), graphql_name='changedFields')
+    attributes = sgqlc.types.Field(sgqlc.types.list_of(String), graphql_name='attributes')
+    layer_id = sgqlc.types.Field(Int, graphql_name='layerId')
+    last_update = sgqlc.types.Field(DateTime, graphql_name='lastUpdate')
+    cluster_name = sgqlc.types.Field(String, graphql_name='clusterName')
+    cluster_id = sgqlc.types.Field(Int, graphql_name='clusterId')
+
+
 class Mutation(sgqlc.types.Type):
     __schema__ = schema
     __field_names__ = ('start_analysis',)
-    start_analysis = sgqlc.types.Field(MinimalAnalysis, graphql_name='startAnalysis', args=sgqlc.types.ArgDict((
-        ('inputs', sgqlc.types.Arg(sgqlc.types.non_null(GeoAnalysisInput), graphql_name='inputs', default=None)),
-    ))
+    start_analysis = sgqlc.types.Field(
+        MinimalAnalysis,
+        graphql_name='startAnalysis',
+        args=sgqlc.types.ArgDict(
+            (
+                (
+                    'inputs',
+                    sgqlc.types.Arg(
+                        sgqlc.types.non_null(GeoAnalysisInput),
+                        graphql_name='inputs',
+                        default=None,
+                    ),
+                ),
+            )
+        ),
     )
 
 
@@ -185,20 +283,65 @@ class PlaceIdentifier(sgqlc.types.Type):
 
 class Query(sgqlc.types.Type):
     __schema__ = schema
-    __field_names__ = ('analysis_metadata', 'allowed_analysis_areas', 'clusters', 'regional_layers', 'access_rules')
-    analysis_metadata = sgqlc.types.Field(sgqlc.types.list_of(MinimalAnalysis), graphql_name='analysisMetadata', args=sgqlc.types.ArgDict((
-        ('analysis_id', sgqlc.types.Arg(UUID, graphql_name='analysisId', default=None)),
-    ))
+    __field_names__ = (
+        'analysis_metadata',
+        'allowed_analysis_areas',
+        'clusters',
+        'regional_layers',
+        'access_rules',
+        'layer_changelog',
+    )
+    analysis_metadata = sgqlc.types.Field(
+        sgqlc.types.list_of(MinimalAnalysis),
+        graphql_name='analysisMetadata',
+        args=sgqlc.types.ArgDict(
+            (
+                (
+                    'analysis_id',
+                    sgqlc.types.Arg(UUID, graphql_name='analysisId', default=None),
+                ),
+            )
+        ),
     )
     allowed_analysis_areas = sgqlc.types.Field(MinimalAreasEnabled, graphql_name='allowedAnalysisAreas')
     clusters = sgqlc.types.Field(sgqlc.types.list_of(MinimalCluster), graphql_name='clusters')
-    regional_layers = sgqlc.types.Field(sgqlc.types.list_of(MinimalLayer), graphql_name='regionalLayers', args=sgqlc.types.ArgDict((
-        ('place_type', sgqlc.types.Arg(sgqlc.types.non_null(PlaceTypeGeo), graphql_name='placeType', default=None)),
-        ('place_id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='placeId', default=None)),
-    ))
+    regional_layers = sgqlc.types.Field(
+        sgqlc.types.list_of(MinimalLayer),
+        graphql_name='regionalLayers',
+        args=sgqlc.types.ArgDict(
+            (
+                (
+                    'place_type',
+                    sgqlc.types.Arg(
+                        sgqlc.types.non_null(PlaceTypeGeo),
+                        graphql_name='placeType',
+                        default=None,
+                    ),
+                ),
+                (
+                    'place_id',
+                    sgqlc.types.Arg(
+                        sgqlc.types.non_null(String),
+                        graphql_name='placeId',
+                        default=None,
+                    ),
+                ),
+            )
+        ),
    )
     access_rules = sgqlc.types.Field(sgqlc.types.list_of(MinimalGeoAccessRule), graphql_name='accessRules')
-
+    layer_changelog = sgqlc.types.Field(
+        sgqlc.types.list_of(LayerChangelogEntry),
+        graphql_name='layerChangelog',
+        args=sgqlc.types.ArgDict(
+            (
+                (
+                    'inputs',
+                    sgqlc.types.Arg(LayerChangelogInput, graphql_name='inputs', default=None),
+                ),
+            )
        ),
    )
 
 
 
@@ -211,4 +354,3 @@ class Query(sgqlc.types.Type):
 schema.query_type = Query
 schema.mutation_type = Mutation
 schema.subscription_type = None
-
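
The notable schema addition in 0.2.2 is the layerChangelog query, together with the LayerChangeAction enum and the LayerChangelogInput/LayerChangelogEntry types. The sketch below shows how a client could select the new field with sgqlc; the endpoint path, the Authorization header, and the cluster_name value are placeholders, not values taken from the package.

from sgqlc.endpoint.http import HTTPEndpoint
from sgqlc.operation import Operation

from nefino_geosync import schema

# Build a query against the new layerChangelog field. All LayerChangelogInput
# fields are optional, so any subset can be passed as a filter.
op = Operation(schema.Query)
changelog = op.layer_changelog(inputs={'cluster_name': 'example-cluster'})
changelog.layer_name()
changelog.cluster_name()
changelog.action()
changelog.timestamp()
changelog.changed_fields()

# Placeholder endpoint URL and token.
endpoint = HTTPEndpoint('https://api.nefino.li/graphql', {'Authorization': 'Bearer <token>'})
data = endpoint(op)
for entry in (op + data).layer_changelog or []:
    print(entry.layer_name, entry.action, entry.timestamp)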

nefino_geosync/session_logger.py ADDED
@@ -0,0 +1,112 @@
+"""Session logging utility that captures all stdout/stderr to timestamped log files."""
+
+import os
+import sys
+from .storage import get_app_directory
+from datetime import datetime
+from typing import TextIO
+
+
+class TeeStream:
+    """A stream that writes to multiple outputs (console + log file)."""
+
+    def __init__(self, original_stream: TextIO, log_file: TextIO, stream_name: str) -> None:
+        self.original_stream = original_stream
+        self.log_file = log_file
+        self.stream_name = stream_name
+
+    def write(self, text: str) -> None:
+        # Write to original stream (console)
+        self.original_stream.write(text)
+
+        # Write to log file with stream prefix for clarity
+        if text.strip():  # Only add prefix for non-empty content
+            timestamp = datetime.now().strftime('%H:%M:%S')
+            self.log_file.write(f'[{timestamp}] [{self.stream_name}] {text}')
+        else:
+            self.log_file.write(text)
+
+        # Ensure immediate writing
+        self.original_stream.flush()
+        self.log_file.flush()
+
+    def flush(self) -> None:
+        self.original_stream.flush()
+        self.log_file.flush()
+
+    def __getattr__(self, name):
+        # Delegate any other attributes to the original stream
+        return getattr(self.original_stream, name)
+
+
+class SessionLogger:
+    """Manages session-wide logging to timestamped files."""
+
+    def __init__(self) -> None:
+        self.log_file = None
+        self.original_stdout = None
+        self.original_stderr = None
+        self.started = False
+
+    def start_logging(self) -> None:
+        """Start capturing stdout and stderr to a timestamped log file."""
+        if self.started:
+            return
+
+        # Create timestamped log file name
+        timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
+        log_filename = f'nefino-geosync_{timestamp}.log'
+        log_path = os.path.join(get_app_directory(), log_filename)
+
+        # Open log file
+        self.log_file = open(log_path, 'w', encoding='utf-8')
+
+        # Write session header
+        self.log_file.write('=== Nefino GeoSync Session Log ===\n')
+        self.log_file.write(f'Started: {datetime.now().isoformat()}\n')
+        self.log_file.write(f'Log file: {log_path}\n')
+        self.log_file.write('=' * 50 + '\n\n')
+
+        # Store original streams
+        self.original_stdout = sys.stdout
+        self.original_stderr = sys.stderr
+
+        # Replace with tee streams
+        sys.stdout = TeeStream(self.original_stdout, self.log_file, 'STDOUT')
+        sys.stderr = TeeStream(self.original_stderr, self.log_file, 'STDERR')
+
+        self.started = True
+        print(f'Session logging started: {log_path}')
+
+    def stop_logging(self) -> None:
+        """Stop logging and restore original streams."""
+        if not self.started:
+            return
+
+        print('Session logging stopped.')
+
+        # Restore original streams
+        sys.stdout = self.original_stdout
+        sys.stderr = self.original_stderr
+
+        # Write session footer and close log file
+        if self.log_file:
+            self.log_file.write('\n' + '=' * 50 + '\n')
+            self.log_file.write(f'Session ended: {datetime.now().isoformat()}\n')
+            self.log_file.close()
+
+        self.started = False
+
+
+# Global session logger instance
+_session_logger = SessionLogger()
+
+
+def start_session_logging() -> None:
+    """Start session-wide logging."""
+    _session_logger.start_logging()
+
+
+def stop_session_logging() -> None:
+    """Stop session-wide logging."""
+    _session_logger.stop_logging()
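
session_logger.py is new in 0.2.2: it tees sys.stdout and sys.stderr into a timestamped log file inside the application directory while still printing to the console. A minimal usage sketch follows; the try/finally wrapper is illustrative, not a claim about how run.py wires it up.

from nefino_geosync.session_logger import start_session_logging, stop_session_logging

start_session_logging()  # stdout/stderr now also go to nefino-geosync_<timestamp>.log
try:
    print('doing work...')  # shows on the console and in the log with a [HH:MM:SS] [STDOUT] prefix
finally:
    stop_session_logging()  # restores the original streams and closes the log file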

nefino_geosync/start_analyses.py CHANGED
@@ -1,48 +1,56 @@
-from typing import Any
-from .api_client import general_availability_operation, local_availability_operation, start_analyses_operation
+import sys
+from .api_client import (
+    general_availability_operation,
+    local_availability_operation,
+    start_analyses_operation,
+)
 from .compose_requests import compose_complete_requests
-from .journal import Journal
+from .download_completed_analyses import download_completed_analyses
 from .graphql_errors import check_errors
+from .journal import Journal
+from .layer_changelog import LayerChangelogResult
 from .parse_args import parse_args
 from sgqlc.endpoint.http import HTTPEndpoint
-from .download_completed_analyses import download_completed_analyses
+from typing import Any
 
 AnalysesMutationResult = Any
 
-def start_analyses(client: HTTPEndpoint) -> AnalysesMutationResult:
+
+def start_analyses(client: HTTPEndpoint, changelog_result: LayerChangelogResult = None) -> AnalysesMutationResult:
     """Starts the analyses for all updated data."""
     journal = Journal.singleton()
-    args = parse_args()
+    parse_args()
     # Get information about our permissions and the general availability of layers
     general_op = general_availability_operation()
-    print("Checking for layers to update...")
+    print('Checking for layers to update...')
     general_data = client(general_op)
-    check_errors(general_data)
-    general_availability = (general_op + general_data)
+    check_errors(general_data, 'Failed to fetch general layer availability')
+    general_availability = general_op + general_data
 
     # Get information about the availability of layers in specific areas
     local_op = local_availability_operation(general_availability)
     local_data = client(local_op)
-    check_errors(local_data)
-    local_availability = (local_op + local_data)
+    check_errors(local_data, 'Failed to fetch regional layer availability')
+    local_availability = local_op + local_data
 
     # Start the analyses
-    analysis_inputs = compose_complete_requests(general_availability, local_availability)
+    analysis_inputs = compose_complete_requests(general_availability, local_availability, changelog_result)
     if len(analysis_inputs) == 0:
         # We can only check for layer having been unpacked already.
         # So if we're here, we've already unpacked all latest layers.
-        print("✅ No layers to update. Done.")
-        exit(0)
+        print('✅ No layers to update. Done.')
+        sys.exit(0)
     for federal_state_key in analysis_inputs:
-        print(f"Starting analysis for {federal_state_key}")
+        print(f'Starting analysis for {federal_state_key}')
         analyses_op = start_analyses_operation({federal_state_key: analysis_inputs[federal_state_key]})
+        print(f'Started analysis for {federal_state_key}. Waiting for completion...')
         analyses_data = client(analyses_op)
-        check_errors(analyses_data)
-        analyses = (analyses_op + analyses_data)
+        check_errors(analyses_data, f'Failed to start analysis for {federal_state_key}')
+        analyses = analyses_op + analyses_data
 
         # Add the analyses to the journal
-        journal.record_analyses_requested(analyses)
-        print("Analysis started.")
+        journal.record_analyses_requested(analyses, analysis_inputs)
+        print(f'Analysis for {federal_state_key} finished')
         download_completed_analyses(client)
-
-    return(analyses)
+
+    return analyses
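
start_analyses now takes an optional changelog_result (a LayerChangelogResult from the new layer_changelog module, which is not shown in this diff) and forwards it to compose_complete_requests, and every check_errors call now carries an explicit error message. A hypothetical driver call follows, with a placeholder endpoint URL and token; the real client construction lives in api_client.py and run.py.

from sgqlc.endpoint.http import HTTPEndpoint

from nefino_geosync.start_analyses import start_analyses

# Placeholder endpoint URL and token, not taken from the package.
client = HTTPEndpoint('https://api.nefino.li/graphql', {'Authorization': 'Bearer <token>'})

# changelog_result defaults to None, so the 0.2.0-style call still works;
# passing a LayerChangelogResult presumably lets compose_complete_requests
# take recent layer changes into account.
analyses = start_analyses(client)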

nefino_geosync-0.2.0.dist-info/METADATA → nefino_geosync-0.2.2.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: nefino-geosync
-Version: 0.2.0
+Version: 0.2.2
 Summary: Python package to access geographical data from Nefino.LI Geo
 Project-URL: Application, https://nefino.li
 Project-URL: API, https://api.nefino.li

nefino_geosync-0.2.2.dist-info/RECORD ADDED
@@ -0,0 +1,23 @@
+nefino_geosync/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+nefino_geosync/access_rule_filter.py,sha256=mCSMBrXNkGojiW2Ly8D9w46emaigp9bkDSW7f2gVnBM,372
+nefino_geosync/api_client.py,sha256=hUflfpp5Yo-_MalS1fdvIQfXtfUghzT9dx2WQMsCN_4,4264
+nefino_geosync/compose_requests.py,sha256=3p_9Jqaq6T-Cp4iUpV5J_X8VZzznzEJIY_bmNDTlEFM,5155
+nefino_geosync/config.py,sha256=thVVMsw4-CGgYJj63sX6nQShSZ9n9zEuz_9RCNL1tIM,3491
+nefino_geosync/download_analysis.py,sha256=W4Z1k_A3gKbVbQ0wXgLup3ojtABwqGGzB4X4hho4mTQ,4858
+nefino_geosync/download_completed_analyses.py,sha256=nXkIVAqCP_ZNfG76fwy4LPx3VD939AwNID96XKlHGJk,765
+nefino_geosync/get_downloadable_analyses.py,sha256=eBEOjH6MvW9Oh-lF5FdVvRhC8sNHBulsW3Aj4dR-0fw,1694
+nefino_geosync/graphql_errors.py,sha256=wmwQCALJA75vWXLGN3Q1lVso6vJWStzgLFDHeuVVNcw,2636
+nefino_geosync/journal.py,sha256=mvN3ZyfUVRBMiBlAfFtrlsODMew2MCLwDM0Qa8PZJzM,7612
+nefino_geosync/layer_changelog.py,sha256=4NG5uRxlXIpdziwdk2etfARUR-1dYnrhZm6RBQHAhrQ,9878
+nefino_geosync/parse_args.py,sha256=75e8J7WDiueOnMuPQdvwj-w_dXMhrGCOLMNTRIFGOHA,937
+nefino_geosync/run.py,sha256=Chv-eAQDNlIFxv3F4EFhTbqIzIyzO7vgSYSgGgUVCtI,1898
+nefino_geosync/schema.json,sha256=yBHwVYsfF1ybUFthGUuNpkdGkBdx_DcqZgq0ul4TIYo,71104
+nefino_geosync/schema.py,sha256=Vqm3Hpm6CtDi0boR-F81PrmpNrlnDlkpVLGAXXIxL2M,12633
+nefino_geosync/session_logger.py,sha256=6bpUNAaosus1XRGWyWTalzTmddrqxSgTxZoTqGnX6_k,3564
+nefino_geosync/start_analyses.py,sha256=ZtIjuo5FXiGfgdwah2S6tUhO6i8n6y3SsLq2kNPQeIo,2468
+nefino_geosync/storage.py,sha256=_fnE-zQnNSMfgoUratUcDGbMUJA-oatf55AXbuftFyI,1424
+nefino_geosync-0.2.2.dist-info/METADATA,sha256=PjQgk7tQz2CwVMV64l3R7A5aaXgFYDEn7crnN_4cIfE,15496
+nefino_geosync-0.2.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+nefino_geosync-0.2.2.dist-info/entry_points.txt,sha256=zZ7Zrx1MyxZKd2iGrvRmHjYzY5C2EMgqY5TqzNOGYqo,59
+nefino_geosync-0.2.2.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+nefino_geosync-0.2.2.dist-info/RECORD,,

nefino_geosync-0.2.0.dist-info/RECORD REMOVED
@@ -1,21 +0,0 @@
-nefino_geosync/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-nefino_geosync/access_rule_filter.py,sha256=mCSMBrXNkGojiW2Ly8D9w46emaigp9bkDSW7f2gVnBM,372
-nefino_geosync/api_client.py,sha256=ylVVEC1f6N8P77VVZ5RLp3lieVnLmrepMO1IjnBGU4o,3617
-nefino_geosync/compose_requests.py,sha256=oiVApnl-cxDBFeIoxaU_UREI4EXOHSCsad0i65Uh1BM,3740
-nefino_geosync/config.py,sha256=thVVMsw4-CGgYJj63sX6nQShSZ9n9zEuz_9RCNL1tIM,3491
-nefino_geosync/download_analysis.py,sha256=KysIFyDOK-W3iEXQHIVqKrZtpYhMn8SuoAOPoy45rGc,4689
-nefino_geosync/download_completed_analyses.py,sha256=ggCZGi8mOL2imT8iwfR8yTsdvnKHA6jyw6Dwil1TLU0,866
-nefino_geosync/get_downloadable_analyses.py,sha256=Om0Q6F7xRze-jvo865XN2qzRT91zmuA-TAKYmeVsGIE,1651
-nefino_geosync/graphql_errors.py,sha256=n92jAicdfeH7weUo1I1gTIC2JQKDybYlD9BVredSHQw,1512
-nefino_geosync/journal.py,sha256=5IbOBfIM-oWXCd7ZvMqx-8A069dDsW15DZ9YcnCzV0s,5271
-nefino_geosync/parse_args.py,sha256=75e8J7WDiueOnMuPQdvwj-w_dXMhrGCOLMNTRIFGOHA,937
-nefino_geosync/run.py,sha256=l4b-P5B3_CNqp6d13_ZjCBbGyp7MWIC1ZwgNfEvvZm8,914
-nefino_geosync/schema.json,sha256=yBHwVYsfF1ybUFthGUuNpkdGkBdx_DcqZgq0ul4TIYo,71104
-nefino_geosync/schema.py,sha256=ALP9_2sKh75sXrYDmTiBEBLygZMiZF-B8hahcYYJZrE,9203
-nefino_geosync/start_analyses.py,sha256=xWicbXvJybKb8Yi756M2cRpfys0bzmri9_eByAot0sE,2063
-nefino_geosync/storage.py,sha256=_fnE-zQnNSMfgoUratUcDGbMUJA-oatf55AXbuftFyI,1424
-nefino_geosync-0.2.0.dist-info/METADATA,sha256=u_xhglgwTiMK2VJoRMSC1KcfyH6J0VDrR0QSXBHFpvs,15496
-nefino_geosync-0.2.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-nefino_geosync-0.2.0.dist-info/entry_points.txt,sha256=zZ7Zrx1MyxZKd2iGrvRmHjYzY5C2EMgqY5TqzNOGYqo,59
-nefino_geosync-0.2.0.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
-nefino_geosync-0.2.0.dist-info/RECORD,,