nefino-geosync 0.2.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
File without changes
@@ -0,0 +1,11 @@
+
+class AccessRuleFilter:
+    def __init__(self, access_rules):
+        self.access_rules = access_rules
+
+    def check(self, place, cluster):
+        for access_rule in self.access_rules:
+            if place in access_rule.places:
+                if access_rule.all_clusters_enabled or cluster in access_rule.clusters:
+                    return True
+        return False
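
A minimal usage sketch of the filter above. The rule object here is a hypothetical stand-in for the access-rule results returned by the availability query; only the attributes that check() reads are modeled.

class FakeRule:
    """Hypothetical stand-in for an access rule from the API."""
    def __init__(self, places, clusters, all_clusters_enabled=False):
        self.places = places
        self.clusters = clusters
        self.all_clusters_enabled = all_clusters_enabled

rules = AccessRuleFilter([FakeRule(places=['DE9'], clusters=['grid'])])
print(rules.check('DE9', 'grid'))    # True: place listed and cluster listed
print(rules.check('DE9', 'other'))   # False: cluster not listed and all_clusters_enabled is False
print(rules.check('DE2', 'grid'))    # False: no rule covers this place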
@@ -0,0 +1,119 @@
+"""This module handles the API client for the Nefino API.
+If you want to use the Nefino API for something other than fetching the latest geodata,
+you can use this client to interact with the API directly.
+"""
+
+from .config import Config
+from .schema import GeoAnalysisInput, PlaceTypeGeo, schema
+from sgqlc.endpoint.http import HTTPEndpoint
+from sgqlc.operation import Operation
+from typing import Any, Dict, List, Optional
+
+
+def get_client(api_host: str = 'https://api.nefino.li') -> HTTPEndpoint:
+    """Returns an HTTP client for the Nefino API."""
+    headers = {'Authorization': Config.singleton().api_key}
+    return HTTPEndpoint(f'{api_host}/external', headers)
+
+
+def general_availability_operation() -> Operation:
+    """Builds an operation to fetch the general availability of layers and access permissions from the Nefino API."""
+    operation = Operation(schema.Query)
+    analysis_areas = operation.allowed_analysis_areas()
+    analysis_areas.all_areas_enabled()
+    analysis_areas.enabled_states().place_id()
+
+    access_rules = operation.access_rules()
+    access_rules.all_clusters_enabled()
+    access_rules.clusters()
+    access_rules.places()
+
+    clusters = operation.clusters()
+    clusters.name()
+    clusters.has_access()
+    layers = clusters.layers()
+    layers.name()
+    layers.last_update()
+    layers.is_regional()
+    layers.pre_buffer()
+    return operation
+
+
+# Any is the most specific type we can write for the results from the availability query
+# this is a limitation of sgqlc types
+# GitHub issue: https://github.com/profusion/sgqlc/issues/129
+GeneralAvailabilityResult = Any
+LocalAvailabilityResult = Any
+
+
+def local_availability_operation(
+    availability_result: GeneralAvailabilityResult,
+) -> Operation:
+    """Builds an operation to determine location-specific details of all layers."""
+    operation = Operation(schema.Query)
+    for state in build_states_list(availability_result):
+        regional_layers = operation.regional_layers(
+            # if you request the same field multiple times with different arguments,
+            # you need to give each copy a unique alias
+            __alias__=f'regionalLayers_{state}',
+            place_id=state,
+            place_type=PlaceTypeGeo('FEDERAL_STATE_GEO'),
+        )
+        regional_layers.name()
+        regional_layers.last_update()
+    return operation
+
+
+def build_states_list(availability_result: GeneralAvailabilityResult) -> List[str]:
+    """Returns a list of states from the availability result."""
+    if availability_result.allowed_analysis_areas is None:
+        return []
+    if availability_result.allowed_analysis_areas.all_areas_enabled:
+        # DE1 to DEG are the place_ids for the German states (EU NUTS scheme)
+        return [f'DE{i}' for i in list('123456789ABCDEFG')]
+    return [state.place_id for state in availability_result.allowed_analysis_areas.enabled_states]
+
+
+def start_analyses_operation(inputs: Dict[str, GeoAnalysisInput]) -> Operation:
+    """Builds an operation to start analyses with the given inputs."""
+    operation = Operation(schema.Mutation)
+    for state, input_data in inputs.items():
+        start_analysis = operation.start_analysis(inputs=input_data, __alias__=f'startAnalysis_{state}')
+        start_analysis.pk()
+        start_analysis.status()
+        start_analysis.url()
+    return operation
+
+
+def get_analyses_operation() -> Operation:
+    """Builds an operation to get all analyses."""
+    operation = Operation(schema.Query)
+    analyses = operation.analysis_metadata()
+    analyses.pk()
+    analyses.status()
+    analyses.url()
+    analyses.started_at()
+    return operation
+
+
+def layer_changelog_operation(timestamp_start: Optional[str] = None) -> Operation:
+    """Builds an operation to get layer changelog entries."""
+    operation = Operation(schema.Query)
+
+    # Build the input object for the changelog query
+    changelog_input = {}
+    if timestamp_start:
+        changelog_input['timestampStart'] = timestamp_start
+
+    changelog = operation.layer_changelog(inputs=changelog_input)
+    changelog.layer_name()
+    changelog.timestamp()
+    changelog.action()
+    changelog.changed_fields()
+    changelog.attributes()
+    changelog.layer_id()
+    changelog.last_update()
+    changelog.cluster_name()
+    changelog.cluster_id()
+
+    return operation
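
A sketch of how these operation builders are typically driven, assuming a valid API key has been configured. The "op + data" step is the standard sgqlc idiom for interpreting the raw JSON response through the operation that produced it (the same idiom the polling code later in this diff uses).

client = get_client()
op = general_availability_operation()
data = client(op)                     # POST the GraphQL document
general_availability = op + data      # typed view of the response
local_op = local_availability_operation(general_availability)
local_availability = local_op + client(local_op)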
@@ -0,0 +1,134 @@
+from .access_rule_filter import AccessRuleFilter
+from .api_client import (
+    GeneralAvailabilityResult,
+    LocalAvailabilityResult,
+    build_states_list,
+)
+from .config import Config
+from .journal import Journal
+from .layer_changelog import LayerChangelogResult, layer_has_relevant_changes_in_changelog
+from .parse_args import parse_args
+from .schema import (
+    CoordinateInput,
+    GeoAnalysisInput,
+    GeoAnalysisLayerInput,
+    GeoAnalysisObjectInput,
+    GeoAnalysisOutputFormatInput,
+    GeoAnalysisRequestInput,
+    GeoAnalysisScopeInput,
+    ScopeType,
+)
+from typing import Dict, List, Optional, Set
+
+# Place analyses require a dummy coordinate. It will be ignored in calculations.
+DUMMY_COORDINATE = CoordinateInput(lon=9.0, lat=52.0)
+# The API requires a list of combining operations as input, even if none are used.
+DUMMY_OPERATIONS = []
+
+
+def compose_complete_requests(
+    general_availability: GeneralAvailabilityResult,
+    local_availability: LocalAvailabilityResult,
+    changelog_result: LayerChangelogResult = None,
+) -> Dict[str, GeoAnalysisInput]:
+    """Use fetched data to build the complete requests for all available layers."""
+    available_states = build_states_list(general_availability)
+
+    # Log the list of available federal states
+    if available_states:
+        print(f'📍 Checking {len(available_states)} available federal state(s): {", ".join(sorted(available_states))}')
+    else:
+        print('⚠️ No federal states available for your account')
+        return {}
+
+    requests_as_tuples = [
+        (state, compose_single_request(state, general_availability, local_availability, changelog_result))
+        for state in available_states
+    ]
+
+    # Filter out None requests and notify user about up-to-date states
+    result = {}
+    for state, request in requests_as_tuples:
+        if request is not None:
+            result[state] = request
+        else:
+            print(f'✅ {state} is up-to-date')
+
+    return result
+
+
+def compose_layer_inputs(
+    layers: list, local_layers: Set[str], state: str, cluster_name: str, changelog_result: LayerChangelogResult = None
+) -> List[GeoAnalysisLayerInput]:
+    """Build a list of layer inputs for the layers in a cluster that need updating."""
+    args = parse_args()
+    journal = Journal.singleton()
+    updated_layers = []
+
+    print(f' 🔍 Checking layers in cluster {cluster_name} for {state}...')
+
+    for layer in layers:
+        # Check if layer should be processed
+        is_available = (not layer.is_regional) or (layer.name in local_layers)
+        needs_update = journal.is_newer_than_saved(layer.name, state, layer.last_update)
+        has_relevant_changes = layer_has_relevant_changes_in_changelog(changelog_result, layer.name, cluster_name)
+
+        if is_available and (needs_update or has_relevant_changes):
+            updated_layers.append(layer)
+            if args.verbose:
+                reason = 'last update' if needs_update else 'relevant changes'
+                print(f' 📄 {layer.name} needs update ({reason}: {layer.last_update})')
+
+    if updated_layers:
+        print(f' ⚡ Found {len(updated_layers)} layer(s) to update in cluster {cluster_name} for {state}')
+    else:
+        print(f' ✅ All layers are up-to-date in cluster {cluster_name} for {state}')
+
+    return [GeoAnalysisLayerInput(layer_name=layer.name, buffer_m=[layer.pre_buffer]) for layer in updated_layers]
+
+
+def compose_single_request(
+    state: str,
+    general_availability: GeneralAvailabilityResult,
+    local_availability: LocalAvailabilityResult,
+    changelog_result: LayerChangelogResult = None,
+) -> Optional[GeoAnalysisInput]:
+    """Build a single request for a given state, or return None if everything is already up-to-date."""
+    print(f'🔍 Checking layers for {state}...')
+
+    config = Config.singleton()
+    rules = AccessRuleFilter(general_availability.access_rules)
+    # specify the data we want to add to the analysis
+    state_local_layers = {layer.name for layer in local_availability[f'regionalLayers_{state}']}
+
+    for skip_layer in config.skip_layers:
+        state_local_layers.discard(skip_layer)
+
+    requests_as_tuples = [
+        (cluster, compose_layer_inputs(cluster.layers, state_local_layers, state, cluster.name, changelog_result))
+        for cluster in general_availability.clusters
+        if cluster.has_access and rules.check(state, cluster.name)
+    ]
+
+    requests = [
+        GeoAnalysisRequestInput(cluster_name=cluster.name, layers=layers)
+        for (cluster, layers) in requests_as_tuples
+        if len(layers) > 0
+    ]
+
+    if len(requests) == 0:
+        return None
+    # Specify the output format
+    # TODO: this should be configurable
+    output = GeoAnalysisOutputFormatInput(template_name='default', type=config.output_format, crs=config.crs)
+    # specify where the analysis should be done
+    scope = GeoAnalysisScopeInput(place=state, type=ScopeType('FEDERAL_STATE'))
+    # put everything together into a specification for an analysis
+    spec = GeoAnalysisObjectInput(
+        coordinate=DUMMY_COORDINATE,
+        output=output,
+        scope=scope,
+        requests=requests,
+        operations=DUMMY_OPERATIONS,
+    )
+    return GeoAnalysisInput(name=f'sync_{state}', specs=spec)
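
Putting these helpers together, a rough sketch of the sync flow they imply (error handling omitted; the availability results come from the api_client operations earlier in this diff):

inputs = compose_complete_requests(general_availability, local_availability, changelog_result)
if inputs:
    start_op = start_analyses_operation(inputs)
    started = start_op + client(start_op)   # one aliased startAnalysis per state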
@@ -0,0 +1,82 @@
+import json
+import os
+from typing import List
+
+import questionary
+from .schema import CRSType, OutputObjectType
+from .storage import get_app_directory
+
+
+class Config:
+    """This class handles storing and retrieving user preferences."""
+    # This is a singleton class. There should only be one instance of Config.
+    _instance = None
+
+    @classmethod
+    def singleton(cls):
+        """Returns the singleton instance of Config."""
+        if not cls._instance:
+            cls._instance = Config()
+        return cls._instance
+
+    @property
+    def _config_file_path(self) -> str:
+        """Returns the path to the config file."""
+        return os.path.join(get_app_directory(), "config.json")
+
+    def __init__(self):
+        if Config._instance:
+            raise Exception("Config is a singleton class. Use Config.singleton() to get the instance.")
+        self.already_prompted = False
+        if not os.path.exists(self._config_file_path):
+            self.run_config_prompts(missing_config=True)
+        else:
+            with open(self._config_file_path, "r") as f:
+                config = json.load(f)
+                self.output_path: str = config['output_path']
+                self.output_format: OutputObjectType = OutputObjectType(
+                    config['output_format'])
+                self.crs: CRSType = CRSType(config['crs'])
+                self.skip_layers: List[str] = config['skip_layers']
+                self.api_key: str = config['api_key']
+
+    def save(self):
+        """Saves the config to a file."""
+        with open(self._config_file_path, "w") as f:
+            json.dump({
+                'output_path': self.output_path,
+                'output_format': self.output_format,
+                'skip_layers': self.skip_layers,
+                'api_key': self.api_key,
+                'crs': self.crs
+            }, f)
+
+    def run_config_prompts(self, missing_config: bool = False):
+        """Runs the configuration wizard."""
+        self.output_path = questionary.text(
+            "Where do you want to collect downloaded geodata files?",
+            default=os.path.join(get_app_directory(), "newestData") \
+            if missing_config else self.output_path).ask()
+        self.output_format = OutputObjectType(
+            questionary.select(
+                "What format do you want to use for the output files?",
+                instruction="Changing this value after first run will require wiping the downloaded data.",
+                choices=['GPKG', 'SHP'],
+                default='GPKG' if missing_config else self.output_format
+            ).ask())
+        self.crs = CRSType(
+            questionary.select(
+                "What coordinate reference system do you want to use?",
+                choices=[crs for crs in CRSType],
+                default='EPSG_4326' if missing_config else self.crs
+            ).ask())
+        self.api_key = questionary.text(
+            "Enter your API key:",
+            default="" if missing_config else self.api_key).ask()
+        skip_layer_string = questionary.text(
+            "Enter the names of any layers you want to skip downloading, separated by commas.",
+            instruction="Layer names can be found on https://docs.nefino.li/geo.",
+            default="" if missing_config else ",".join(self.skip_layers)).ask()
+        self.skip_layers = [] if skip_layer_string == "" else skip_layer_string.split(",")
+        self.save()
+        self.already_prompted = True
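
Other modules in the package obtain the shared instance via the singleton accessor; a small sketch (the values shown in the comments are illustrative only):

config = Config.singleton()    # runs the wizard if config.json is missing, loads it otherwise
print(config.output_format)    # e.g. 'GPKG' or 'SHP'
print(config.crs)              # e.g. 'EPSG_4326'
print(config.skip_layers)      # e.g. []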
@@ -0,0 +1,131 @@
+import os
+import re
+import zipfile
+from .config import Config
+from .get_downloadable_analyses import AnalysisResult
+from .journal import Journal
+from .storage import get_download_directory
+from datetime import datetime
+from shutil import move, rmtree
+from urllib.request import urlretrieve
+
+
+def download_analysis(analysis: AnalysisResult) -> None:
+    """Downloads the analysis to the local machine."""
+    journal = Journal.singleton()
+    download_dir = get_download_directory(analysis.pk)
+    download_file = os.path.join(download_dir, 'download.zip')
+    if os.path.exists(download_file):
+        # remove any failed download
+        os.remove(download_file)
+    urlretrieve(analysis.url.replace(' ', '%20'), download_file)
+    with zipfile.ZipFile(download_file, 'r') as zip_ref:
+        zip_ref.extractall(download_dir)
+    zip_root = get_zip_root(download_dir)
+    unpack_items(zip_root, analysis.pk, analysis.started_at)
+    journal.record_analysis_synced(analysis.pk)
+
+
+def get_zip_root(download_dir: str) -> str:
+    """Returns the root directory of the extracted zip file."""
+    # earlier we had a heavily nested structure
+    return download_dir
+
+
+FILE_NAME_PATTERN = re.compile(r'(?P<layer>^.*?)(?P<buffer>__[0-9]+m)?(?P<ext>\..{3,4}$)')
+
+
+def unpack_items(zip_root: str, pk: str, started_at: datetime) -> None:
+    """
+    Unpacks the layers from the zip file.
+
+    Args:
+        zip_root: Path to the root directory of the extracted zip
+        pk: Primary key of the analysis
+        started_at: Timestamp when the analysis started
+    """
+    journal = Journal.singleton()
+    config = Config.singleton()
+
+    if pk not in journal.analysis_states:
+        print(f'Analysis {pk} not found in journal; skipping download')
+        return
+
+    state = journal.get_state_for_analysis(pk)
+    base_path = get_base_path(zip_root)
+
+    # Iterate through cluster folders inside the analysis subfolder
+    for cluster in (
+        f for f in os.listdir(base_path) if f != 'analysis_area' and os.path.isdir(os.path.join(base_path, f))
+    ):
+        cluster_dir = os.path.join(base_path, cluster)
+
+        for file in os.listdir(cluster_dir):
+            if journal.is_newer_than_saved(file, state, started_at):
+                output_dir = os.path.join(config.output_path, state)
+                if not os.path.exists(output_dir):
+                    os.makedirs(output_dir)
+
+                file_path = os.path.join(cluster_dir, file)
+                match = re.match(FILE_NAME_PATTERN, file)
+                layer, ext = (match.group('layer'), match.group('ext'))
+
+                # Remove any existing files for the same layer
+                # this is important to avoid confusion if the pre-buffer changes
+                for matching_file in (f for f in os.listdir(output_dir) if f.startswith(layer)):
+                    output_match = re.match(FILE_NAME_PATTERN, matching_file)
+                    # only remove files that match the layer and extension
+                    # otherwise, only the last extension to be unpacked would survive
+                    # also, we are double-checking the layer name here in case we have
+                    # a layer name which starts with a different layer's name
+                    if output_match.group('layer') == layer and output_match.group('ext') == ext:
+                        os.remove(os.path.join(output_dir, matching_file))
+
+                move(file_path, output_dir)
+
+    # Update the journal to mark layers as updated. Some requested layers may come back empty (no file in the ZIP),
+    # so we mark all requested layers as updated, not only the ones we actually moved.
+    if pk in journal.analysis_requested_layers:
+        layers_to_mark_updated = journal.analysis_requested_layers[pk]
+    else:
+        # Fallback: extract layer names from the ZIP file structure as a safety net
+        print(f'⚠️ Warning: No recorded requested layers for analysis {pk}. Extracting from ZIP structure.')
+        layers_to_mark_updated = set()
+        for cluster in (
+            f for f in os.listdir(base_path) if f != 'analysis_area' and os.path.isdir(os.path.join(base_path, f))
+        ):
+            cluster_dir = os.path.join(base_path, cluster)
+            for file in os.listdir(cluster_dir):
+                match = re.match(FILE_NAME_PATTERN, file)
+                if match:
+                    layers_to_mark_updated.add(match.group('layer'))
+
+    print(f'Recording {len(layers_to_mark_updated)} requested layers as updated for state {state}')
+
+    journal.record_layers_unpacked(layers_to_mark_updated, state, started_at)
+    rmtree(zip_root)
+
+
+def get_base_path(zip_root: str) -> str:
+    """
+    Returns the base path for the analysis files in the ZIP structure.
+
+    Handles two different ZIP structures:
+    - Old structure: analysis_summary.xlsx and cluster folders directly in ZIP root
+    - New structure: analysis_summary.xlsx and cluster folders inside a dedicated subfolder
+
+    The presence of analysis_summary.xlsx in the root directory is used to determine
+    which structure we're dealing with.
+
+    Args:
+        zip_root: Path to the root directory of the extracted ZIP file
+
+    Returns:
+        str: Path to the directory containing the cluster folders and analysis_summary.xlsx
+    """
+    if 'analysis_summary.xlsx' in os.listdir(zip_root):
+        # Old structure - use zip_root
+        return zip_root
+    # Get the analysis subfolder name (first and only directory in zip_root)
+    analysis_subfolder = next(f for f in os.listdir(zip_root) if os.path.isdir(os.path.join(zip_root, f)))
+    return os.path.join(zip_root, analysis_subfolder)
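
A worked example of FILE_NAME_PATTERN above, showing how a downloaded file name splits into layer, optional pre-buffer suffix, and extension (the file names are hypothetical):

import re

FILE_NAME_PATTERN = re.compile(r'(?P<layer>^.*?)(?P<buffer>__[0-9]+m)?(?P<ext>\..{3,4}$)')

m = FILE_NAME_PATTERN.match('wind_turbines__500m.gpkg')
print(m.group('layer'), m.group('buffer'), m.group('ext'))   # wind_turbines __500m .gpkg

m = FILE_NAME_PATTERN.match('protected_areas.shp')
print(m.group('layer'), m.group('buffer'), m.group('ext'))   # protected_areas None .shp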
@@ -0,0 +1,21 @@
+from .download_analysis import download_analysis
+from .get_downloadable_analyses import get_downloadable_analyses
+from .journal import Journal
+from .parse_args import parse_args
+from sgqlc.endpoint.http import HTTPEndpoint
+
+
+def download_completed_analyses(client: HTTPEndpoint) -> None:
+    """Downloads the analyses that have been completed."""
+    journal = Journal.singleton()
+    args = parse_args()
+    for analysis in get_downloadable_analyses(client):
+        if analysis.pk not in journal.synced_analyses:
+            if analysis.pk in journal.analysis_states:
+                if analysis.pk not in journal.analysis_requested_layers:
+                    print(f'⚠️ Warning: Analysis {analysis.pk} found but has no recorded requested layers. Skipping.')
+                    continue
+                download_analysis(analysis)
+                print(f'Downloaded analysis {analysis.pk}')
+        elif args.verbose:
+            print(f'Analysis {analysis.pk} already downloaded')
@@ -0,0 +1,48 @@
+from .api_client import get_analyses_operation
+from .graphql_errors import check_errors
+from .parse_args import parse_args
+from .schema import DateTime, Status
+from sgqlc.endpoint.http import HTTPEndpoint
+from time import sleep
+from typing import Generator, Protocol
+
+
+# Let's give a quick description of what we want to be fetching.
+# This does depend on what get_analyses_operation() actually does.
+class AnalysisResult(Protocol):
+    status: Status
+    pk: str
+    url: str
+    started_at: DateTime
+
+
+def get_downloadable_analyses(
+    client: HTTPEndpoint,
+) -> Generator[AnalysisResult, None, None]:
+    """Yields analyses that are available for download.
+    Polls for more analyses and yields them until no more are available.
+    """
+    verbose = parse_args().verbose
+    op = get_analyses_operation()
+    reported_pks = set()
+    print('Checking for analyses to download...')
+    while True:
+        data = client(op)
+        check_errors(data, 'Failed to fetch analysis status')
+        analyses = op + data
+        found_outstanding_analysis = False
+
+        for analysis in analyses.analysis_metadata:
+            if analysis.status == Status('PENDING') or analysis.status == Status('RUNNING'):
+                if verbose:
+                    print(f'Analysis {analysis.pk} is still pending or running.')
+                found_outstanding_analysis = True
+            if analysis.status == Status('SUCCESS') and analysis.pk not in reported_pks:
+                reported_pks.add(analysis.pk)
+                yield analysis
+
+        if not found_outstanding_analysis:
+            break
+        if verbose:
+            print('Waiting for more analyses to finish...')
+        sleep(10)
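
A sketch of how the generator is consumed (this mirrors download_completed_analyses earlier in the diff): each successful analysis is yielded as soon as it appears, while the loop keeps polling until nothing is pending or running.

client = get_client()
for analysis in get_downloadable_analyses(client):
    print(analysis.pk, analysis.status, analysis.started_at)
    # download_analysis(analysis) is what the real sync flow does here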
@@ -0,0 +1,73 @@
+import html
+import json
+import re
+import sys
+from .parse_args import parse_args
+from datetime import datetime
+from prompt_toolkit import print_formatted_text
+from prompt_toolkit.formatted_text import HTML
+
+
+def check_errors(data: dict, context: str = None) -> None:
+    """Check for errors in a GraphQL response."""
+    args = parse_args()
+    if 'errors' in data:
+        if args.verbose:
+            pp('<b>GraphQL operation with errors:</b> ' + html.escape(json.dumps(data, indent=4)))
+
+        if is_token_invalid(data):
+            pp(
+                '<b fg="red">ERROR:</b> Invalid token. Please run <b>nefino-geosync --configure</b> and double-check your API key.'
+            )
+        else:
+            if not args.verbose:
+                try:
+                    pp(
+                        '<b>Received GraphQL error from server:</b> '
+                        + html.escape(json.dumps(data['errors'], indent=4))
+                    )
+                except Exception as e:
+                    print(e)
+                    print(data['errors'])
+
+        # Add context information if provided
+        if context:
+            pp(f'<b fg="red">Context:</b> {context}')
+
+        if not args.verbose:
+            pp("""<b fg="red">ERROR:</b> A GraphQL error occurred. Run with <b>--verbose</b> to see more information.
+If this error persists, please contact Nefino support: https://www.nefino.de/kontakt
+Exiting due to the above error.""")
+        else:
+            pp('<b fg="red">ERROR:</b> A GraphQL error occurred.')
+            pp(
+                '<b fg="red">If this error persists, please contact Nefino support: https://www.nefino.de/kontakt</b>'
+            )
+            pp('<b fg="red">Exiting due to the above error.</b>')
+
+        sys.exit(1)
+
+
+def pp(to_print: str) -> None:
+    # Display formatted text in console
+    print_formatted_text(HTML(to_print))
+
+    # For logging: check if stdout has been replaced by TeeStream
+    # If so, write plain text directly to the log file to avoid duplication
+    if hasattr(sys.stdout, 'log_file'):
+        # Remove HTML tags for plain text logging
+        plain_text = re.sub(r'<[^>]+>', '', to_print)
+
+        timestamp = datetime.now().strftime('%H:%M:%S')
+        sys.stdout.log_file.write(f'[{timestamp}] [STDOUT] {plain_text}\n')
+        sys.stdout.log_file.flush()
+
+
+def is_token_invalid(data: dict) -> bool:
+    """Check if the token is invalid."""
+    try:
+        if data['errors'][0]['extensions']['nefino_type'] == 'AuthTokenInvalid':
+            return True
+    except KeyError:
+        return False
+    return False
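
For reference, a sketch of the error payload shape that is_token_invalid looks for, reconstructed from the key path it checks (the message field is illustrative):

data = {
    'errors': [
        {
            'message': 'Authentication token is invalid',   # illustrative
            'extensions': {'nefino_type': 'AuthTokenInvalid'},
        }
    ]
}
assert is_token_invalid(data)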