kb-dashboard-cli 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
"""Dashboard CLI Package - CLI, LSP, and future MCP server for kb-yaml-to-lens."""

from beartype import BeartypeConf
from beartype.claw import beartype_this_package
from kb_dashboard_core.dashboard_compiler import dump, load, render
from kb_dashboard_tools import KibanaClient

# Enable strict BearType checking for every module in this package:
# - warning_cls_on_decorator_exception=None: raise fatal exceptions instead of warnings
# - claw_is_pep526=True: type-check annotated variable assignments (the default,
#   spelled out explicitly for clarity)
beartype_this_package(
    conf=BeartypeConf(
        warning_cls_on_decorator_exception=None,
        claw_is_pep526=True,
    )
)

# Public API re-exported from the core compiler and tools packages.
__all__ = [
    'KibanaClient',
    'dump',
    'load',
    'render',
]
"""Command-line interface for the dashboard compiler.

This module serves as the entry point and command registry for the CLI.
Commands are organized into separate modules:

- cli_local: Local file operations (compile, disassemble, lsp)
- cli_remote: Remote Kibana/Elasticsearch operations (fetch, screenshot, etc.)
- cli_output: Centralized output helpers for consistent messaging
- cli_options: Reusable option decorators for Kibana and Elasticsearch
- cli_context: Context object for sharing clients across commands
"""

import logging
import os
import sys
from importlib.metadata import PackageNotFoundError, version

import rich_click as click

from dashboard_compiler.cli_context import CliContext
from dashboard_compiler.cli_local import compile_dashboards, disassemble, lsp
from dashboard_compiler.cli_remote import (
    export_for_issue,
    extract_sample_data_command,
    fetch,
    load_sample_data_command,
    screenshot_dashboard,
)

# Disable rich_click colors when generating documentation or when NO_COLOR is set.
# This keeps ANSI escape sequences out of mkdocs-click generated docs and piped output.
if 'NO_COLOR' in os.environ or not sys.stdout.isatty():
    click.rich_click.COLOR_SYSTEM = None
    click.rich_click.FORCE_TERMINAL = False

click.rich_click.USE_RICH_MARKUP = True
click.rich_click.SHOW_ARGUMENTS = True
click.rich_click.GROUP_ARGUMENTS_OPTIONS = True

# Resolve the package version dynamically from installed package metadata.
try:
    __version__ = version('kb-dashboard-cli')
except PackageNotFoundError:
    # Fallback if the package is not installed (e.g., during development).
    __version__ = '0.0.0-dev'
@click.group()
@click.version_option(version=__version__)
@click.option(
    '--loglevel',
    type=click.Choice(['DEBUG', 'INFO', 'WARNING', 'ERROR'], case_sensitive=False),
    default='WARNING',
    help='Set logging verbosity level for compilation output.',
)
@click.pass_context
def cli(ctx: click.Context, loglevel: str) -> None:
    r"""Kibana Dashboard Compiler - Compile YAML dashboards to Kibana format.

    This tool helps you manage Kibana dashboards as code by compiling YAML
    configurations into Kibana's NDJSON format and optionally uploading them
    to your Kibana instance.

    \b
    Common workflows:
    1. Compile dashboards: kb-dashboard compile
    2. Compile and upload: kb-dashboard compile --upload
    3. Take a screenshot: kb-dashboard screenshot --dashboard-id ID --output file.png
    4. Export for issue: kb-dashboard export-for-issue --dashboard-id ID
    5. Disassemble dashboard: kb-dashboard disassemble dashboard.ndjson -o output_dir

    \b
    Authentication:
    Use either username/password OR API key (not both):
    - Basic auth: --kibana-username USER --kibana-password PASS
    - API key: --kibana-api-key KEY (recommended for production)

    Use environment variables (KIBANA_URL, KIBANA_USERNAME, KIBANA_PASSWORD,
    KIBANA_API_KEY) to avoid passing credentials on the command line.
    """
    # Translate the --loglevel choice into the numeric logging level.
    selected_level: int = getattr(logging, loglevel.upper())  # pyright: ignore[reportAny]
    logging.basicConfig(level=selected_level, format='%(message)s')
    # Mirror the level on our package logger so its messages honour the option too.
    logging.getLogger('dashboard_compiler').setLevel(selected_level)

    # Create (or reuse) the shared context object that option decorators populate.
    _ = ctx.ensure_object(CliContext)
# Register local file operation commands.
cli.add_command(compile_dashboards)
cli.add_command(disassemble)
cli.add_command(lsp)

# Register remote Kibana/Elasticsearch commands under their CLI-facing names.
cli.add_command(screenshot_dashboard, name='screenshot')
cli.add_command(fetch)
cli.add_command(export_for_issue, name='export-for-issue')
cli.add_command(load_sample_data_command, name='load-sample-data')
cli.add_command(extract_sample_data_command, name='extract-sample-data')


if __name__ == '__main__':
    cli()
"""CLI context for sharing configuration across Click commands."""

from dataclasses import dataclass

from elasticsearch import AsyncElasticsearch
from kb_dashboard_tools.kibana_client import KibanaClient


@dataclass
class CliContext:
    """Context object for sharing clients across CLI commands.

    This dataclass holds optional pre-configured clients that are populated
    by the CLI option decorators. Each command uses only the clients it needs.
    """

    kibana_client: KibanaClient | None = None
    """Pre-configured Kibana client, populated by @kibana_options decorator."""

    es_client: AsyncElasticsearch | None = None
    """Pre-configured Elasticsearch client, populated by @elasticsearch_options decorator."""
"""CLI commands for local file operations (compile, disassemble, lsp)."""

import asyncio
import io
import logging
import sys
import webbrowser
from pathlib import Path
from typing import TYPE_CHECKING

import rich_click as click
import yaml
from kb_dashboard_core.dashboard.view import KbnDashboard
from kb_dashboard_core.dashboard_compiler import load, render
from kb_dashboard_core.shared.error_formatter import format_validation_error, format_yaml_error
from kb_dashboard_core.tools.disassemble import disassemble_dashboard, parse_ndjson
from pydantic import ValidationError

from dashboard_compiler.cli_context import CliContext
from dashboard_compiler.cli_options import kibana_options
from dashboard_compiler.cli_output import (
    console,
    create_error_table,
    create_progress,
    print_browser,
    print_bullet,
    print_dim_bullet,
    print_error,
    print_plain,
    print_success,
    print_upload,
    print_warning,
)

if TYPE_CHECKING:
    from kb_dashboard_tools.kibana_client import KibanaClient

# Constants
# Repository root: three levels up from this module file.
PROJECT_ROOT = Path(__file__).parent.parent.parent
DEFAULT_INPUT_DIR = PROJECT_ROOT / 'inputs'
DEFAULT_OUTPUT_DIR = PROJECT_ROOT / 'output'
# Cap for --exit-non-zero-on-change exit codes (values above 125 collide with
# shell-reserved exit statuses).
MAX_EXIT_CODE = 125
+
45
+ def _sanitize_filename(name: str, max_length: int = 200) -> str:
46
+ """Convert a string to a filesystem-safe filename."""
47
+ # Replace filesystem-unsafe characters with underscores
48
+ unsafe_chars = ['/', '\\', ':', '*', '?', '"', '<', '>', '|']
49
+ result = name
50
+ for char in unsafe_chars:
51
+ result = result.replace(char, '_')
52
+
53
+ # Replace spaces with underscores and trim whitespace
54
+ result = result.strip().replace(' ', '_')
55
+
56
+ # Strip leading dots to avoid hidden files and reserved names
57
+ result = result.lstrip('.')
58
+
59
+ # Handle empty or reserved results
60
+ if len(result) == 0 or result in ('.', '..'):
61
+ result = 'untitled'
62
+
63
+ # Truncate to max length
64
+ if len(result) > max_length:
65
+ result = result[:max_length]
66
+
67
+ return result
68
+
69
+
70
+ def _file_content_changed(file_path: Path, new_content: str) -> bool:
71
+ """Check if writing new content would change the filesystem."""
72
+ if not file_path.exists():
73
+ return True
74
+
75
+ existing_content = file_path.read_text(encoding='utf-8')
76
+ return existing_content != new_content
77
+
78
+
79
+ def _write_ndjson(output_path: Path, lines: list[str], overwrite: bool = True) -> None:
80
+ """Write a list of JSON strings to an NDJSON file."""
81
+ if overwrite is False and output_path.exists():
82
+ return
83
+
84
+ with output_path.open('w', encoding='utf-8') as f:
85
+ for line in lines:
86
+ _ = f.write(line + '\n')
87
def compile_yaml_to_json(yaml_path: Path) -> tuple[list[str], list[KbnDashboard], str | None]:
    """Compile dashboard YAML to JSON strings for NDJSON.

    Args:
        yaml_path: Path to the dashboard YAML configuration file.

    Returns:
        Tuple of (list of JSON strings for NDJSON lines, list of dashboard models, error message or None).

    """
    try:
        # Render every dashboard defined in the YAML file to its Kibana model,
        # then serialize each model to a compact JSON line.
        models: list[KbnDashboard] = [render(dashboard) for dashboard in load(str(yaml_path))]
        lines: list[str] = [model.model_dump_json(by_alias=True) for model in models]
    except FileNotFoundError:
        return [], [], f'YAML file not found: {yaml_path}'
    except yaml.YAMLError as e:
        # Syntax errors in the YAML source get the dedicated formatter.
        return [], [], format_yaml_error(e, yaml_path)
    except ValidationError as e:
        # Schema violations get the pydantic-aware formatter.
        return [], [], format_validation_error(e, yaml_path)
    except (ValueError, TypeError, KeyError) as e:
        return [], [], f'Error compiling {yaml_path}: {e}'
    else:
        return lines, models, None
def get_yaml_files(directory: Path) -> list[Path]:
    """Get all YAML files from a directory recursively.

    Args:
        directory: Directory to search for YAML files.

    Returns:
        List of Path objects pointing to YAML files.

    Raises:
        click.ClickException: If directory is not found.

    """
    if not directory.is_dir():
        raise click.ClickException(f'Directory not found: {directory}')

    # Sort for deterministic compile order across platforms.
    found = sorted(directory.rglob('*.yaml'))

    if not found:
        print_warning(f'No YAML files found in {directory}')

    return found
async def _upload_to_kibana(
    client: 'KibanaClient',
    ndjson_file: Path,
    overwrite: bool,
    open_browser: bool,
) -> None:
    """Upload NDJSON file to Kibana.

    Args:
        client: Pre-configured Kibana client
        ndjson_file: Path to NDJSON file to upload
        overwrite: Whether to overwrite existing objects
        open_browser: Whether to open browser after successful upload

    Raises:
        click.ClickException: If upload fails.

    """
    # Imported lazily so commands that never upload don't pay the aiohttp import cost.
    import aiohttp

    async with client:
        try:
            result = await client.upload_ndjson(ndjson_file, overwrite=overwrite)

            # Failure path first: report errors and abort with a ClickException
            # (which is not caught by the handlers below).
            if result.success is not True:
                print_error('Upload failed')
                if len(result.errors) > 0:
                    console.print(create_error_table(result.errors))
                msg = 'Upload to Kibana failed'
                raise click.ClickException(msg)

            print_success(f'Successfully uploaded {result.success_count} object(s) to Kibana')

            # Collect the IDs of uploaded dashboards; destination_id wins when
            # Kibana re-keyed the object on import.
            dashboard_ids = [obj.destination_id or obj.id for obj in result.success_results if obj.type == 'dashboard']

            if len(dashboard_ids) > 0 and open_browser is True:
                dashboard_url = client.get_dashboard_url(dashboard_ids[0])
                print_browser(f'Opening dashboard: {dashboard_url}')
                _ = webbrowser.open_new_tab(dashboard_url)

            # Partial failures can accompany an overall success.
            if len(result.errors) > 0:
                print_warning(f'Encountered {len(result.errors)} error(s):')
                console.print(create_error_table(result.errors))

        except aiohttp.ClientError as e:
            raise click.ClickException(f'Error communicating with Kibana: {e}') from e
        except (OSError, ValueError) as e:
            raise click.ClickException(f'Error uploading to Kibana: {e}') from e
@click.command('compile')
@click.option(
    '--input-dir',
    type=click.Path(file_okay=False, path_type=Path),
    default=DEFAULT_INPUT_DIR,
    help='Directory containing YAML dashboard files to compile.',
)
@click.option(
    '--input-file',
    type=click.Path(exists=True, dir_okay=False, path_type=Path),
    default=None,
    help='Path to a single YAML dashboard file to compile. When provided, --input-dir is ignored.',
)
@click.option(
    '--output-dir',
    type=click.Path(file_okay=False, path_type=Path),
    default=DEFAULT_OUTPUT_DIR,
    help='Directory where compiled NDJSON files will be written.',
)
@click.option(
    '--output-file',
    type=str,
    default='compiled_dashboards.ndjson',
    help='Filename for the combined output NDJSON file containing all dashboards.',
)
@click.option(
    '--format',
    'output_format',
    type=click.Choice(['ndjson', 'json'], case_sensitive=False),
    default='ndjson',
    help='Output format: "ndjson" for combined files (default), "json" for individual pretty-printed files named by dashboard ID.',
)
@click.option(
    '--upload',
    is_flag=True,
    help='Upload compiled dashboards to Kibana immediately after compilation.',
)
@kibana_options
@click.option(
    '--no-browser',
    is_flag=True,
    help='Prevent browser from opening automatically after successful upload.',
)
@click.option(
    '--overwrite/--no-overwrite',
    default=True,
    help='Whether to overwrite existing dashboards in Kibana (default: overwrite).',
)
@click.option(
    '--exit-non-zero-on-change',
    is_flag=True,
    help='Exit with non-zero code when files change (useful for CI sync detection).',
)
def compile_dashboards(  # noqa: PLR0913, PLR0912, PLR0915
    ctx: click.Context,
    input_dir: Path,
    input_file: Path | None,
    output_dir: Path,
    output_file: str,
    output_format: str,
    upload: bool,
    no_browser: bool,
    overwrite: bool,
    exit_non_zero_on_change: bool,
) -> None:
    r"""Compile YAML dashboard configurations to NDJSON format.

    This command finds all YAML files in the input directory (or compiles a
    single file provided via --input-file), compiles them to Kibana's JSON
    format, and outputs them as NDJSON files.

    Optionally, you can upload the compiled dashboards directly to Kibana
    using the --upload flag.

    The --format option controls output format:
    - ndjson (default): Groups dashboards by directory into NDJSON files
    - json: Creates individual pretty-printed JSON files named by dashboard ID

    By default, the command exits with code 0 on success. Use --exit-non-zero-on-change
    to enable CI sync detection mode, where the exit code equals the number of files
    that changed (capped at 125).

    \b
    Examples:
    # Compile dashboards from default directory
    kb-dashboard compile

    # Compile a single dashboard file
    kb-dashboard compile --input-file ./dashboards/example.yaml

    # Compile with custom input and output directories
    kb-dashboard compile --input-dir ./dashboards --output-dir ./output

    # Compile to individual JSON files per dashboard
    kb-dashboard compile --format json --output-dir ./output

    # Compile and upload to Kibana using basic auth
    kb-dashboard compile --upload --kibana-url https://kibana.example.com \
    --kibana-username admin --kibana-password secret

    # Compile and upload using API key (recommended)
    kb-dashboard compile --upload --kibana-url https://kibana.example.com \
    --kibana-api-key "your-api-key-here"

    # Use environment variables for credentials
    export KIBANA_URL=https://kibana.example.com
    export KIBANA_API_KEY=your-api-key
    kb-dashboard compile --upload
    """
    # Context is already populated by @kibana_options decorator
    if not isinstance(ctx.obj, CliContext):  # pyright: ignore[reportAny]
        msg = 'Context object must be CliContext'
        raise TypeError(msg)
    cli_context = ctx.obj

    # Normalize output format once for consistent comparisons
    output_format_lower = output_format.lower()

    output_dir.mkdir(parents=True, exist_ok=True)

    # Resolve the set of YAML files to compile: a single explicit file wins
    # over directory scanning.
    if input_file is not None:
        if input_file.suffix != '.yaml':
            msg = f'Input file must have a .yaml extension: {input_file}'
            raise click.ClickException(msg)
        yaml_files = [input_file]
    else:
        yaml_files = get_yaml_files(input_dir)
        if len(yaml_files) == 0:
            print_plain('No YAML files to compile.', style='yellow')
            return

    ndjson_lines: list[str] = []  # all compiled records, for counts and the combined file
    errors: list[str] = []
    files_to_write: dict[Path, list[str]] = {}  # per-directory NDJSON groups
    json_files_to_write: list[tuple[Path, str]] = []
    json_filenames_seen: set[str] = set()  # guards against sanitized-name collisions
    changed_files_count = 0

    with create_progress() as progress:
        task = progress.add_task('Compiling dashboards...', total=len(yaml_files))

        for yaml_file in yaml_files:
            # Prefer project-relative paths in progress output when possible.
            try:
                display_path = yaml_file.relative_to(PROJECT_ROOT)
            except ValueError:
                display_path = yaml_file
            progress.update(task, description=f'Compiling: {display_path}')
            compiled_jsons, kbn_dashboards, error = compile_yaml_to_json(yaml_file)

            if len(compiled_jsons) > 0:
                if output_format_lower == 'json':
                    for kbn_dashboard in kbn_dashboards:
                        if not kbn_dashboard.id:
                            msg = f'Dashboard ID is required for JSON output: {yaml_file}'
                            raise click.ClickException(msg)
                        safe_name = _sanitize_filename(kbn_dashboard.id)
                        if safe_name in json_filenames_seen:
                            msg = f'Duplicate dashboard ID after sanitization: {kbn_dashboard.id}'
                            raise click.ClickException(msg)
                        json_filenames_seen.add(safe_name)
                        json_file = output_dir / f'{safe_name}.json'
                        pretty_json = kbn_dashboard.model_dump_json(by_alias=True, indent=2)
                        json_files_to_write.append((json_file, pretty_json))
                else:
                    # Group NDJSON output by the YAML file's parent directory name.
                    # FIX: the group file was previously hard-coded to the literal
                    # '(unknown).ndjson', clobbering every group into one file and
                    # leaving `filename` unused; use the directory-derived name.
                    filename = yaml_file.parent.stem
                    individual_file = output_dir / f'{filename}.ndjson'
                    if individual_file not in files_to_write:
                        files_to_write[individual_file] = []
                    files_to_write[individual_file].extend(compiled_jsons)
                # Track totals for both output formats so summaries and the
                # combined NDJSON file see every compiled dashboard.
                ndjson_lines.extend(compiled_jsons)
            elif error is not None:
                errors.append(error)

            progress.advance(task)

    # Write outputs, counting changed files for --exit-non-zero-on-change.
    if output_format_lower == 'json':
        for json_file, json_content in json_files_to_write:
            if _file_content_changed(json_file, json_content):
                changed_files_count += 1
            with json_file.open('w', encoding='utf-8') as f:
                _ = f.write(json_content)
    else:
        for individual_file, jsons in files_to_write.items():
            content = '\n'.join(jsons) + '\n'
            if _file_content_changed(individual_file, content):
                changed_files_count += 1
            _write_ndjson(individual_file, jsons, overwrite=True)

    if len(ndjson_lines) > 0:
        print_success(f'Successfully compiled {len(ndjson_lines)} dashboard(s)')

    if len(errors) > 0:
        print_warning(f'Encountered {len(errors)} error(s):')
        for error in errors:
            print_bullet(error)

    if len(ndjson_lines) == 0:
        print_error('No valid YAML configurations found or compiled.')
        return

    if output_format_lower == 'json':
        print_success(f'Wrote {len(json_files_to_write)} individual JSON file(s)')
    else:
        combined_file = output_dir / output_file
        combined_content = '\n'.join(ndjson_lines) + '\n'
        if _file_content_changed(combined_file, combined_content):
            changed_files_count += 1
        _write_ndjson(combined_file, ndjson_lines, overwrite=True)
        try:
            display_path = combined_file.relative_to(PROJECT_ROOT)
        except ValueError:
            display_path = combined_file
        print_success(f'Wrote combined file: {display_path}')

    if changed_files_count > 0:
        print_warning(f'{changed_files_count} file(s) changed')
    else:
        print_success('No files changed')

    if upload is True:
        if output_format_lower == 'json':
            print_warning('Upload is not supported with --format json')
        else:
            if cli_context.kibana_client is None:
                msg = 'Kibana client not configured'
                raise click.ClickException(msg)
            print_upload('Uploading to Kibana...')
            combined_file = output_dir / output_file
            asyncio.run(_upload_to_kibana(cli_context.kibana_client, combined_file, overwrite, not no_browser))

    if exit_non_zero_on_change is True:
        # CI sync-detection mode: exit code mirrors the change count, capped.
        exit_code = min(changed_files_count, MAX_EXIT_CODE)
        ctx.exit(exit_code)
@click.command('disassemble')
@click.argument('input_file', type=click.Path(exists=True, path_type=Path), required=False)
@click.option(
    '-o',
    '--output',
    type=click.Path(path_type=Path),
    required=True,
    help='Output directory for component files.',
)
def disassemble(input_file: Path | None, output: Path) -> None:
    r"""Disassemble a Kibana dashboard NDJSON file into components.

    This command breaks down a Kibana dashboard JSON file (in NDJSON format)
    into separate files for easier processing by LLMs. This enables incremental
    conversion of large dashboards to YAML format.

    The dashboard is split into:
    - metadata.json: Dashboard metadata
    - options.json: Dashboard display options
    - controls.json: Dashboard control group configuration
    - filters.json: Dashboard-level filters
    - references.json: Data view and index pattern references
    - panels/: Directory containing individual panel JSON files

    \b
    Examples:
    # Disassemble a dashboard NDJSON file
    kb-dashboard disassemble dashboard.ndjson -o output_dir

    # Read from stdin
    cat dashboard.ndjson | kb-dashboard disassemble -o output_dir

    # Download and disassemble directly
    curl -u user:pass http://localhost:5601/api/saved_objects/dashboard/my-id | \
    kb-dashboard disassemble -o output_dir
    """
    try:
        if input_file is not None:
            content = input_file.read_text(encoding='utf-8')
        elif hasattr(sys.stdin, 'buffer'):
            # Wrap the raw buffer so stdin is decoded as UTF-8 even on
            # platforms (e.g. Windows) whose default encoding differs.
            content = io.TextIOWrapper(sys.stdin.buffer, encoding='utf-8').read()
        else:
            # Fallback for environments where stdin.buffer is not available.
            content = sys.stdin.read()

        dashboard = parse_ndjson(content)
        components = disassemble_dashboard(dashboard, output)

        print_success(f'Dashboard disassembled to: {output}')
        print_dim_bullet('metadata.json: Dashboard metadata')

        # Report each optional component that was actually written.
        optional_parts = (
            ('options', 'options.json: Dashboard options'),
            ('controls', 'controls.json: Control group configuration'),
            ('filters', 'filters.json: Dashboard-level filters'),
            ('references', 'references.json: Data view references'),
        )
        for key, message in optional_parts:
            if components.get(key) is True:
                print_dim_bullet(message)

        panel_count = components.get('panels')
        if isinstance(panel_count, int):
            print_dim_bullet(f'panels/: {panel_count} panel files')

    except (ValueError, OSError) as e:
        raise click.ClickException(f'Error disassembling dashboard: {e}') from e
@click.command()
def lsp() -> None:
    """Start the Language Server Protocol (LSP) server for IDE integration.

    The LSP server provides real-time compilation, validation, and code
    completion for YAML dashboard files in supported IDEs like VS Code.

    This server communicates via stdin/stdout using the Language Server
    Protocol specification.
    """
    # Imported lazily so ordinary CLI invocations don't pay the LSP import cost.
    from dashboard_compiler.lsp.server import start_server as start_lsp_server

    # Route all logging to stderr: stdout carries the JSON-RPC stream and must
    # stay free of contamination. force=True replaces any earlier handlers.
    logging.basicConfig(level=logging.INFO, format='%(message)s', stream=sys.stderr, force=True)
    start_lsp_server()