ttnn-visualizer 0.69.0__py3-none-any.whl → 0.71.0__py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- ttnn_visualizer/app.py +32 -1
- ttnn_visualizer/csv_queries.py +64 -36
- ttnn_visualizer/models.py +0 -1
- ttnn_visualizer/queries.py +25 -2
- ttnn_visualizer/settings.py +9 -7
- ttnn_visualizer/sftp_operations.py +2 -4
- ttnn_visualizer/static/assets/allPaths-D6qA1aj4.js +1 -0
- ttnn_visualizer/static/assets/allPathsLoader-t8G4bNwo.js +2 -0
- ttnn_visualizer/static/assets/{index-C28SjqxA.js → index-8RVye9cY.js} +335 -364
- ttnn_visualizer/static/assets/index-BVzPYDVR.css +1 -0
- ttnn_visualizer/static/assets/index-BmDjQHI0.js +1 -0
- ttnn_visualizer/static/assets/index-CzNKtOwn.js +1 -0
- ttnn_visualizer/static/assets/splitPathsBySizeLoader--w3Ey8_r.js +1 -0
- ttnn_visualizer/static/index.html +2 -2
- ttnn_visualizer/tests/test_queries.py +1 -2
- ttnn_visualizer/tests/test_serializers.py +4 -12
- ttnn_visualizer/tests/test_utils.py +98 -6
- ttnn_visualizer/utils.py +259 -9
- ttnn_visualizer/views.py +20 -22
- {ttnn_visualizer-0.69.0.dist-info → ttnn_visualizer-0.71.0.dist-info}/METADATA +2 -2
- ttnn_visualizer-0.71.0.dist-info/RECORD +44 -0
- {ttnn_visualizer-0.69.0.dist-info → ttnn_visualizer-0.71.0.dist-info}/licenses/LICENSE +2 -0
- ttnn_visualizer/static/assets/allPaths-Clj2DdFL.js +0 -1
- ttnn_visualizer/static/assets/allPathsLoader-DisDEJDi.js +0 -2
- ttnn_visualizer/static/assets/index-BZITDwoa.js +0 -1
- ttnn_visualizer/static/assets/index-D_AHNWw3.css +0 -1
- ttnn_visualizer/static/assets/index-voJy5fZe.js +0 -1
- ttnn_visualizer/static/assets/splitPathsBySizeLoader-D98y4BkT.js +0 -1
- ttnn_visualizer-0.69.0.dist-info/RECORD +0 -44
- {ttnn_visualizer-0.69.0.dist-info → ttnn_visualizer-0.71.0.dist-info}/WHEEL +0 -0
- {ttnn_visualizer-0.69.0.dist-info → ttnn_visualizer-0.71.0.dist-info}/entry_points.txt +0 -0
- {ttnn_visualizer-0.69.0.dist-info → ttnn_visualizer-0.71.0.dist-info}/licenses/LICENSE_understanding.txt +0 -0
- {ttnn_visualizer-0.69.0.dist-info → ttnn_visualizer-0.71.0.dist-info}/top_level.txt +0 -0
ttnn_visualizer/app.py
CHANGED
@@ -28,7 +28,12 @@ from ttnn_visualizer.exceptions import (
 )
 from ttnn_visualizer.instances import create_instance_from_local_paths
 from ttnn_visualizer.settings import Config, DefaultConfig
-from ttnn_visualizer.utils import
+from ttnn_visualizer.utils import (
+    find_gunicorn_path,
+    get_app_data_directory,
+    get_report_data_directory,
+    migrate_old_data_directory,
+)
 from werkzeug.debug import DebuggedApplication
 from werkzeug.middleware.proxy_fix import ProxyFix
 

@@ -123,7 +128,10 @@ def extensions(app: flask.Flask):
     if app.config["USE_WEBSOCKETS"]:
         socketio.init_app(app)
 
+    # Create app data and report directories
     Path(app.config["APP_DATA_DIRECTORY"]).mkdir(parents=True, exist_ok=True)
+    Path(app.config["LOCAL_DATA_DIRECTORY"]).mkdir(parents=True, exist_ok=True)
+    Path(app.config["REMOTE_DATA_DIRECTORY"]).mkdir(parents=True, exist_ok=True)
     db.init_app(app)
 
     if app.config["USE_WEBSOCKETS"]:

@@ -345,6 +353,29 @@ def main():
     )
     config.SQLALCHEMY_DATABASE_URI = f"sqlite:///{_db_file_path}"
 
+    # Check for and migrate old data from site-packages if needed
+    # Only migrate if environment variables are not explicitly set
+    if not os.getenv("APP_DATA_DIRECTORY") and not os.getenv("REPORT_DATA_DIRECTORY"):
+        # Calculate what the old directories would have been (in site-packages)
+        old_app_data_dir = config.APPLICATION_DIR
+        old_report_data_dir = str(
+            Path(config.APPLICATION_DIR).joinpath("ttnn_visualizer", "data")
+        )
+
+        # Get new directories (already calculated in config)
+        new_app_data_dir = config.APP_DATA_DIRECTORY
+        new_report_data_dir = config.REPORT_DATA_DIRECTORY
+
+        # Only migrate if we're not in TT-Metal mode (migration doesn't apply there)
+        if not config.TT_METAL_HOME:
+            migrate_old_data_directory(
+                old_app_data_dir,
+                old_report_data_dir,
+                new_app_data_dir,
+                new_report_data_dir,
+                config.DB_VERSION,
+            )
+
     display_mode_info_without_db(config)
 
     # If profiler/performance paths are provided, create an instance
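For orientation, here is a minimal sketch of what a data-directory migration helper along the lines of migrate_old_data_directory might do. The real implementation lives in ttnn_visualizer/utils.py (+259 lines, not shown in this diff), so only the signature is taken from the call above; the body and the SQLite file name below are assumptions, not the shipped code.

# Hypothetical sketch only; the shipped helper is in ttnn_visualizer/utils.py.
import shutil
from pathlib import Path


def migrate_old_data_directory(
    old_app_data_dir: str,
    old_report_data_dir: str,
    new_app_data_dir: str,
    new_report_data_dir: str,
    db_version: str,
) -> bool:
    """Copy legacy data out of site-packages into the new directories."""
    moved = False
    # Report data: copy the whole tree if the new location is still empty.
    old_reports, new_reports = Path(old_report_data_dir), Path(new_report_data_dir)
    if old_reports.is_dir() and not new_reports.exists():
        new_reports.parent.mkdir(parents=True, exist_ok=True)
        shutil.copytree(old_reports, new_reports)
        moved = True
    # App data: carry a versioned SQLite file over, if one exists (assumed name).
    old_db = Path(old_app_data_dir) / f"ttnn_{db_version}.sqlite"
    if old_db.is_file():
        Path(new_app_data_dir).mkdir(parents=True, exist_ok=True)
        shutil.copy2(old_db, Path(new_app_data_dir) / old_db.name)
        moved = True
    return moved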
ttnn_visualizer/csv_queries.py
CHANGED
@@ -423,26 +423,30 @@ class OpsPerformanceReportQueries:
     ]
 
     STACKED_REPORT_COLUMNS = [
-        "
-        "
-        "
-        "
-        "
-        "
-        "
-        "
+        "%",
+        "OP Code Joined",
+        "Device_Time_Sum_us",
+        "Ops_count",
+        "Op_Category",
+        "Flops_min",
+        "Flops_max",
+        "Flops_mean",
+        "Flops_std",
+        "Flops_weighted_mean",
     ]
 
     STACKED_REPORT_COLUMNS_WITH_DEVICE = [
-        "
-        "
-        "
-        "
-        "
-        "
-        "
-        "
-        "
+        "%",
+        "OP Code Joined",
+        "Device",
+        "Device_Time_Sum_us",
+        "Ops_count",
+        "Op_Category",
+        "Flops_min",
+        "Flops_max",
+        "Flops_mean",
+        "Flops_std",
+        "Flops_weighted_mean",
     ]
 
     PASSTHROUGH_COLUMNS = {

@@ -455,6 +459,7 @@ class OpsPerformanceReportQueries:
     DEFAULT_PRINT_SIGNPOSTS = True
     DEFAULT_MIN_PERCENTAGE = 0.5
     DEFAULT_ID_RANGE = None
+    DEFAULT_ARCH = None
     DEFAULT_NO_ADVICE = False
     DEFAULT_TRACING_MODE = False
     DEFAULT_RAW_OP_CODES = True

@@ -462,28 +467,36 @@ class OpsPerformanceReportQueries:
     DEFAULT_NO_STACKED_REPORT = False
     DEFAULT_NO_STACK_BY_IN0 = True
     DEFAULT_MERGE_DEVICES = True
+    DEFAULT_STACKED_CSV_FILE = None  # Stacked report CSV output file
+    DEFAULT_NO_SUMMARY = False
+    DEFAULT_SUMMARY_FILE = None  # Stacked report output file
+    DEFAULT_CLASSIC_COLORS = False  # Colour scheme for plotted stacked report
+    DEFAULT_GROUP_BY = None  # Group by method for stacked report
 
     @classmethod
     def generate_report(cls, instance, **kwargs):
         raw_csv = OpsPerformanceQueries.get_raw_csv(instance)
         csv_file = StringIO(raw_csv)
-
-
+        csv_summary_file = tempfile.NamedTemporaryFile(delete=False)
+        csv_output_file = tempfile.NamedTemporaryFile(suffix=".csv", delete=False)
+        # The perf_report library creates files with format: {csv_summary_name}.csv and {csv_summary_name}.png
+        summary_csv_path = f"{csv_summary_file.name}.csv"
+        summary_png_path = f"{csv_summary_file.name}.png"
+        csv_summary_file.close()
+        csv_output_file.close()
+
         start_signpost = kwargs.get("start_signpost", cls.DEFAULT_START_SIGNPOST)
         end_signpost = kwargs.get("end_signpost", cls.DEFAULT_END_SIGNPOST)
         ignore_signposts = cls.DEFAULT_IGNORE_SIGNPOSTS
         print_signposts = kwargs.get("print_signposts", cls.DEFAULT_PRINT_SIGNPOSTS)
-        stack_by_in0 = kwargs.get("stack_by_in0", cls.DEFAULT_NO_STACK_BY_IN0)
         no_host_ops = kwargs.get("hide_host_ops", cls.DEFAULT_NO_HOST_OPS)
         merge_devices = kwargs.get("merge_devices", cls.DEFAULT_MERGE_DEVICES)
+        tracing_mode = kwargs.get("tracing_mode", cls.DEFAULT_TRACING_MODE)
+        group_by = kwargs.get("group_by", cls.DEFAULT_GROUP_BY)
 
         if start_signpost or end_signpost:
             ignore_signposts = False
 
-        # perf_report currently generates a PNG alongside the CSV using the same temp name - we'll just delete it afterwards
-        stacked_png_file = csv_stacked_output_file + ".png"
-        stacked_csv_file = csv_stacked_output_file + ".csv"
-
         try:
             perf_report.generate_perf_report(
                 [csv_file],

@@ -493,14 +506,19 @@ class OpsPerformanceReportQueries:
                 print_signposts,
                 cls.DEFAULT_MIN_PERCENTAGE,
                 cls.DEFAULT_ID_RANGE,
-
+                cls.DEFAULT_ARCH,
+                csv_output_file.name,
                 cls.DEFAULT_NO_ADVICE,
-
+                tracing_mode,
                 cls.DEFAULT_RAW_OP_CODES,
                 no_host_ops,
+                cls.DEFAULT_NO_SUMMARY,
+                group_by,
+                cls.DEFAULT_CLASSIC_COLORS,
+                csv_summary_file.name,
                 cls.DEFAULT_NO_STACKED_REPORT,
-
-
+                cls.DEFAULT_NO_STACK_BY_IN0,
+                cls.DEFAULT_STACKED_CSV_FILE,
                 not merge_devices,
             )
         except Exception as e:

@@ -532,9 +550,9 @@ class OpsPerformanceReportQueries:
 
         report = []
 
-        if os.path.exists(csv_output_file):
+        if os.path.exists(csv_output_file.name):
             try:
-                with open(csv_output_file, newline="") as csvfile:
+                with open(csv_output_file.name, newline="") as csvfile:
                     reader = csv.reader(csvfile, delimiter=",")
                     next(reader, None)
                     for row in reader:

@@ -574,13 +592,13 @@ class OpsPerformanceReportQueries:
             except csv.Error as e:
                 raise DataFormatError() from e
             finally:
-                os.unlink(csv_output_file)
+                os.unlink(csv_output_file.name)
 
         stacked_report = []
 
-        if os.path.exists(
+        if os.path.exists(summary_csv_path):
             try:
-                with open(
+                with open(summary_csv_path, newline="") as csvfile:
                     reader = csv.reader(csvfile, delimiter=",")
                     next(reader, None)
 

@@ -598,6 +616,11 @@ class OpsPerformanceReportQueries:
                         if index < len(row)
                     }
 
+                    # Map "OP Code Joined" to "op_code" for consistency with non-stacked report
+                    if "OP Code Joined" in processed_row:
+                        processed_row["op_code"] = processed_row["OP Code Joined"]
+                        del processed_row["OP Code Joined"]
+
                     if "op_code" in processed_row and any(
                         processed_row["op_code"] in signpost["op_code"]
                         for signpost in signposts

@@ -610,9 +633,14 @@ class OpsPerformanceReportQueries:
             except csv.Error as e:
                 raise DataFormatError() from e
             finally:
-
-                if os.path.exists(
-                os.unlink(
+                # Clean up the files created by perf_report library
+                if os.path.exists(summary_csv_path):
+                    os.unlink(summary_csv_path)
+                if os.path.exists(summary_png_path):
+                    os.unlink(summary_png_path)
+                # Clean up the original temp file that was just used for its name
+                if os.path.exists(csv_summary_file.name):
+                    os.unlink(csv_summary_file.name)
 
         return {
             "report": report,
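The temp-file handling added above follows a pattern worth calling out: NamedTemporaryFile(delete=False) is used only to reserve a unique base name, the handles are closed immediately, the report generator writes "<base>.csv" and "<base>.png" next to that name, and everything is removed in a finally block. Below is a minimal standalone sketch of the same pattern; the "generate" callable is a placeholder, not the real perf_report API.

# Standalone sketch of the temp-file pattern; "generate" stands in for a
# library that writes "<base>.csv" and "<base>.png" given a base path.
import os
import tempfile


def run_with_temp_outputs(generate):
    base = tempfile.NamedTemporaryFile(delete=False)
    base.close()  # only the unique name is needed; the library writes the files
    summary_csv = f"{base.name}.csv"
    summary_png = f"{base.name}.png"
    try:
        generate(base.name)
        with open(summary_csv, newline="") as f:
            return f.read()
    finally:
        # Remove whatever the generator produced, plus the reserved name itself.
        for path in (summary_csv, summary_png, base.name):
            if os.path.exists(path):
                os.unlink(path)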
ttnn_visualizer/models.py
CHANGED
ttnn_visualizer/queries.py
CHANGED
@@ -2,6 +2,7 @@
 #
 # SPDX-FileCopyrightText: © 2025 Tenstorrent AI ULC
 
+import dataclasses
 import sqlite3
 from pathlib import Path
 from typing import Any, Dict, Generator, List, Optional, Union

@@ -88,14 +89,24 @@ class DatabaseQueries:
 
         return bool(rows)
 
+    def _get_table_columns(self, table_name: str) -> List[str]:
+        """
+        Gets the list of column names for a table.
+        """
+        query = f"PRAGMA table_info({table_name})"
+        rows = self.query_runner.execute_query(query)
+        return [row[1] for row in rows]  # row[1] is the column name
+
     def _query_table(
         self,
         table_name: str,
         filters: Optional[Dict[str, Union[Any, List[Any]]]] = None,
         additional_conditions: Optional[str] = None,
         additional_params: Optional[List[Any]] = None,
+        columns: Optional[List[str]] = None,
     ) -> List[Any]:
-
+        columns_str = ", ".join(columns) if columns else "*"
+        query = f"SELECT {columns_str} FROM {table_name} WHERE 1=1"
         params = []
 
         if filters:

@@ -285,7 +296,19 @@ class DatabaseQueries:
     def query_devices(
         self, filters: Optional[Dict[str, Any]] = None
     ) -> Generator[Device, None, None]:
-
+        # Get the expected Device model field names in order
+        device_fields = [field.name for field in dataclasses.fields(Device)]
+
+        # Get all columns from the devices table
+        all_columns = self._get_table_columns("devices")
+
+        # Filter out num_storage_cores if it exists (for backwards compatibility)
+        # and ensure columns are in the order expected by the Device model
+        available_columns = set(all_columns) - {"num_storage_cores"}
+        columns = [col for col in device_fields if col in available_columns]
+
+        rows = self._query_table("devices", filters, columns=columns)
+
         for row in rows:
             yield Device(*row)
 
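The change above makes the device query tolerant of schema drift: it reads the actual column list with PRAGMA table_info, intersects it with the Device dataclass fields, and drops num_storage_cores. A self-contained sketch of the same idea against a plain sqlite3 connection follows; the Device fields here are illustrative, not the real model from models.py.

# Self-contained sketch of the backwards-compatible column selection above.
import dataclasses
import sqlite3


@dataclasses.dataclass
class Device:
    device_id: int
    num_y_cores: int
    num_x_cores: int


def query_devices(conn: sqlite3.Connection):
    # Fields the model expects, in declaration order.
    fields = [f.name for f in dataclasses.fields(Device)]
    # Columns that actually exist in this database file.
    table_columns = {row[1] for row in conn.execute("PRAGMA table_info(devices)")}
    # Select only columns the model knows about, skipping num_storage_cores.
    columns = [c for c in fields if c in table_columns - {"num_storage_cores"}]
    for row in conn.execute(f"SELECT {', '.join(columns)} FROM devices"):
        # Assumes every model field is present as a column in this table.
        yield Device(*row)


# Example: an in-memory table with an extra legacy column is still readable.
conn = sqlite3.connect(":memory:")
conn.execute(
    "CREATE TABLE devices (device_id INTEGER, num_y_cores INTEGER,"
    " num_x_cores INTEGER, num_storage_cores INTEGER)"
)
conn.execute("INSERT INTO devices VALUES (0, 10, 12, 0)")
print(list(query_devices(conn)))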
ttnn_visualizer/settings.py
CHANGED
@@ -9,6 +9,7 @@ from dotenv import load_dotenv
 from sqlalchemy.pool import NullPool
 from ttnn_visualizer.utils import (
     get_app_data_directory,
+    get_report_data_directory,
     is_running_in_container,
     str_to_bool,
 )

@@ -36,20 +37,21 @@ class DefaultConfig(object):
 
     # Path Settings
     DB_VERSION = "0.29.0"  # App version when DB schema last changed
+    APPLICATION_DIR = os.path.abspath(os.path.join(__file__, "..", os.pardir))
+    TT_METAL_HOME = os.getenv("TT_METAL_HOME", None)
+    APP_DATA_DIRECTORY = os.getenv(
+        "APP_DATA_DIRECTORY",
+        get_app_data_directory(TT_METAL_HOME, APPLICATION_DIR),
+    )
     REPORT_DATA_DIRECTORY = os.getenv(
-        "REPORT_DATA_DIRECTORY",
+        "REPORT_DATA_DIRECTORY",
+        get_report_data_directory(TT_METAL_HOME, APPLICATION_DIR),
     )
     LOCAL_DATA_DIRECTORY = Path(REPORT_DATA_DIRECTORY).joinpath("local")
     REMOTE_DATA_DIRECTORY = Path(REPORT_DATA_DIRECTORY).joinpath("remote")
     PROFILER_DIRECTORY_NAME = "profiler-reports"
     PERFORMANCE_DIRECTORY_NAME = "performance-reports"
     NPE_DIRECTORY_NAME = "npe-reports"
-    APPLICATION_DIR = os.path.abspath(os.path.join(__file__, "..", os.pardir))
-    TT_METAL_HOME = os.getenv("TT_METAL_HOME", None)
-    APP_DATA_DIRECTORY = os.getenv(
-        "APP_DATA_DIRECTORY",
-        get_app_data_directory(TT_METAL_HOME, APPLICATION_DIR),
-    )
 
     STATIC_ASSETS_DIR = Path(APPLICATION_DIR).joinpath("ttnn_visualizer", "static")
     SEND_FILE_MAX_AGE_DEFAULT = 0
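For reference, a hypothetical sketch of what directory helpers like get_app_data_directory and get_report_data_directory could look like. The actual implementations live in ttnn_visualizer/utils.py and are not shown in this diff; the fallback locations below are assumptions chosen only to illustrate the env-override plus TT_METAL_HOME/per-user split that DefaultConfig relies on.

# Hypothetical sketch; the shipped helpers may resolve different locations.
from pathlib import Path
from typing import Optional


def get_app_data_directory(tt_metal_home: Optional[str], application_dir: str) -> str:
    # application_dir is accepted to mirror the real signature; unused here.
    if tt_metal_home:
        # Inside a TT-Metal tree, keep visualizer data alongside it (assumed layout).
        return str(Path(tt_metal_home) / "generated" / "ttnn-visualizer")
    # Otherwise prefer a per-user location over site-packages (assumed location).
    return str(Path.home() / ".ttnn-visualizer")


def get_report_data_directory(tt_metal_home: Optional[str], application_dir: str) -> str:
    # In this sketch, report data lives under the app data directory.
    return str(Path(get_app_data_directory(tt_metal_home, application_dir)) / "data")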
ttnn_visualizer/sftp_operations.py
CHANGED
@@ -7,7 +7,6 @@ import logging
 import subprocess
 import time
 from pathlib import Path
-from stat import S_ISDIR
 from threading import Thread
 from typing import List, Optional
 

@@ -28,7 +27,6 @@ logger = logging.getLogger(__name__)
 
 TEST_CONFIG_FILE = "config.json"
 TEST_PROFILER_FILE = "profile_log_device.csv"
-REPORT_DATA_DIRECTORY = Path(__file__).parent.absolute().joinpath("data")
 
 
 def start_background_task(task, *args):

@@ -854,7 +852,7 @@ def sync_remote_profiler_folders(
     """Main function to sync test folders, handles both compressed and individual syncs."""
     profiler_folder = Path(remote_folder_path).name
     destination_dir = Path(
-        REPORT_DATA_DIRECTORY,
+        current_app.config["REPORT_DATA_DIRECTORY"],
         path_prefix,
         remote_connection.host,
         current_app.config["PROFILER_DIRECTORY_NAME"],

@@ -878,7 +876,7 @@ def sync_remote_performance_folders(
     remote_folder_path = performance.remotePath
     profile_folder = Path(remote_folder_path).name
     destination_dir = Path(
-        REPORT_DATA_DIRECTORY,
+        current_app.config["REPORT_DATA_DIRECTORY"],
         path_prefix,
         remote_connection.host,
         current_app.config["PERFORMANCE_DIRECTORY_NAME"],
ttnn_visualizer/static/assets/allPaths-D6qA1aj4.js
ADDED
@@ -0,0 +1 @@
+import{I as s}from"./index-BmDjQHI0.js";import{I as r}from"./index-CzNKtOwn.js";import{p as n,I as c}from"./index-8RVye9cY.js";function p(t,a){const o=n(t);return a===c.STANDARD?s[o]:r[o]}export{s as IconSvgPaths16,r as IconSvgPaths20,p as getIconPaths};

ttnn_visualizer/static/assets/allPathsLoader-t8G4bNwo.js
ADDED
@@ -0,0 +1,2 @@
+const __vite__mapDeps=(i,m=__vite__mapDeps,d=(m.f||(m.f=["assets/allPaths-D6qA1aj4.js","assets/index-BmDjQHI0.js","assets/index-CzNKtOwn.js","assets/index-8RVye9cY.js","assets/index-BVzPYDVR.css"])))=>i.map(i=>d[i]);
+import{_ as e}from"./index-8RVye9cY.js";const s=async(t,a)=>{const{getIconPaths:o}=await e(async()=>{const{getIconPaths:r}=await import("./allPaths-D6qA1aj4.js");return{getIconPaths:r}},__vite__mapDeps([0,1,2,3,4]));return o(t,a)};export{s as allPathsLoader};