tritonparse 0.2.4.dev20251003071457__tar.gz → 0.2.4.dev20251004071438__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of tritonparse might be problematic.
- {tritonparse-0.2.4.dev20251003071457/tritonparse.egg-info → tritonparse-0.2.4.dev20251004071438}/PKG-INFO +1 -1
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tests/test_tritonparse.py +366 -24
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tritonparse/common.py +5 -2
- tritonparse-0.2.4.dev20251004071438/tritonparse/context_manager.py +50 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tritonparse/reproducer/orchestrator.py +17 -17
- tritonparse-0.2.4.dev20251004071438/tritonparse/reproducer/placeholder_replacer.py +115 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tritonparse/reproducer/templates/example.py +12 -5
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tritonparse/shared_vars.py +2 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tritonparse/structured_logging.py +10 -10
- tritonparse-0.2.4.dev20251004071438/tritonparse/tools/__init__.py +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tritonparse/trace_processor.py +7 -4
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tritonparse/utils.py +9 -1
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438/tritonparse.egg-info}/PKG-INFO +1 -1
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tritonparse.egg-info/SOURCES.txt +2 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/src/pages/KernelOverview.tsx +31 -1
- tritonparse-0.2.4.dev20251003071457/tritonparse/context_manager.py +0 -23
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/.ci/README.md +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/.ci/install-project.sh +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/.ci/install-triton-kernels.sh +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/.ci/install-triton.sh +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/.ci/run-tests.sh +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/.ci/setup.sh +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/.github/PAGES_SETUP.md +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/.github/copilot-instructions.md +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/.github/workflows/deploy-pages-standalone.yml +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/.github/workflows/deploy-pages.yml +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/.github/workflows/nightly-pypi.yml +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/.github/workflows/test.yml +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/.gitignore +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/CHANGELOG.md +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/CODE_OF_CONDUCT.md +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/CONTRIBUTING.md +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/LICENSE +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/Makefile +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/README.md +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/__init__.py +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/docs/README.md +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/docs/screenshots/code-comparison.png +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/docs/screenshots/kernel-overview.png +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/pyproject.toml +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/run.py +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/setup.cfg +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tests/README.md +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tests/__init__.py +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tests/example_output/logs/dedicated_log_triton_trace_findhao_.ndjson +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tests/example_output/parsed_output/dedicated_log_triton_trace_findhao__mapped.ndjson.gz +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tests/example_output/parsed_output/f0_fc0_a0_cai-.ndjson.gz +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tests/example_output/parsed_output/log_file_list.json +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tests/example_output/parsed_output_complex/dedicated_log_triton_trace_findhao__mapped.ndjson.gz +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tests/example_output/parsed_output_complex/log_file_list.json +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tests/example_output/repro/repro_context_20250816192455.json +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tests/test_add.py +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tritonparse/__init__.py +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tritonparse/__main__.py +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tritonparse/event_diff.py +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tritonparse/extract_source_mappings.py +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tritonparse/ir_parser.py +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tritonparse/mapper.py +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tritonparse/reproducer/__init__.py +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tritonparse/reproducer/cli.py +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tritonparse/reproducer/ingestion/ndjson.py +0 -0
- {tritonparse-0.2.4.dev20251003071457/tritonparse/tools → tritonparse-0.2.4.dev20251004071438/tritonparse/reproducer/templates}/__init__.py +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tritonparse/reproducer/templates/loader.py +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tritonparse/reproducer/utils.py +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tritonparse/source_type.py +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tritonparse/sourcemap_utils.py +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tritonparse/tools/decompress_bin_ndjson.py +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tritonparse/tools/disasm.py +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tritonparse/tools/format_fix.py +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tritonparse/tools/load_tensor.py +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tritonparse/tools/prettify_ndjson.py +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tritonparse/tools/readme.md +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tritonparse/tp_logger.py +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tritonparse.egg-info/dependency_links.txt +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tritonparse.egg-info/entry_points.txt +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tritonparse.egg-info/requires.txt +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tritonparse.egg-info/top_level.txt +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/eslint.config.js +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/index.html +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/package-lock.json +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/package.json +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/postcss.config.js +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/public/dedicated_log_triton_trace_findhao__mapped.ndjson.gz +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/public/f0_fc0_a0_cai-.ndjson +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/public/favicon.ico +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/public/logo.svg +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/scripts/inline-html.js +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/src/App.css +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/src/App.tsx +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/src/assets/react.svg +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/src/components/ArgumentViewer.tsx +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/src/components/Callstack.tsx +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/src/components/CodeComparisonView.tsx +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/src/components/CodeViewer.tsx +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/src/components/CompilationInfo.tsx +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/src/components/CopyCodeButton.tsx +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/src/components/DataSourceSelector.tsx +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/src/components/DiffComparisonView.tsx +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/src/components/DiffViewer.tsx +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/src/components/ExternalLink.tsx +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/src/components/SingleCodeViewer.tsx +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/src/components/StackDiffViewer.tsx +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/src/components/ToggleSwitch.tsx +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/src/components/TritonIRs.tsx +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/src/components/WelcomeScreen.tsx +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/src/context/FileDiffSession.tsx +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/src/index.css +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/src/main.tsx +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/src/pages/CodeView.tsx +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/src/pages/FileDiffView.tsx +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/src/utils/dataLoader.ts +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/src/utils/fbDetection.ts +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/src/utils/safeImport.ts +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/src/utils/tensor.ts +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/src/vite-env.d.ts +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/tailwind.config.js +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/tsconfig.app.json +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/tsconfig.json +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/tsconfig.node.json +0 -0
- {tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/website/vite.config.ts +0 -0
{tritonparse-0.2.4.dev20251003071457/tritonparse.egg-info → tritonparse-0.2.4.dev20251004071438}/PKG-INFO

```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: tritonparse
-Version: 0.2.4.
+Version: 0.2.4.dev20251004071438
 Summary: TritonParse: A Compiler Tracer, Visualizer, and mini-Reproducer Generator for Triton Kernels
 Author-email: Yueming Hao <yhao@meta.com>
 License-Expression: BSD-3-Clause
```
{tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tests/test_tritonparse.py

```diff
@@ -18,31 +18,126 @@ from dataclasses import dataclass
 from typing import Any, Union
 
 import torch
-
+import torch._inductor.config as inductor_config
 import triton  # @manual=//triton:triton
-
 import triton.language as tl  # @manual=//triton:triton
-
+import tritonparse.context_manager
 import tritonparse.structured_logging
-import tritonparse.tools.disasm
 import tritonparse.utils
+from triton import knobs  # @manual=//triton:triton
 
 from triton.compiler import ASTSource, IRSource  # @manual=//triton:triton
-
 from triton.knobs import CompileTimes  # @manual=//triton:triton
 from tritonparse.common import is_fbcode
+from tritonparse.shared_vars import TEST_KEEP_OUTPUT
 from tritonparse.structured_logging import convert, extract_python_source_info
 from tritonparse.tools.disasm import is_nvdisasm_available
 
 HAS_TRITON_KERNELS = importlib.util.find_spec("triton_kernels") is not None
 
 
-def
-"""
+def create_fresh_triton_cache():
+    """Create a fresh Triton cache directory and return cache management context"""
+    cache_dir = tempfile.mkdtemp(prefix="triton_cache_")
+    return cache_dir, knobs.cache.scope()
+
+
+def setup_fresh_triton_environment(cache_dir):
+    """Setup fresh Triton environment with isolated cache"""
+    # Set up isolated cache directory
+    original_cache_dir = getattr(knobs.cache, "dir", None)
+    knobs.cache.dir = cache_dir
+
+    # Save and reset compilation settings
+    original_always_compile = knobs.compilation.always_compile
+    knobs.compilation.always_compile = True
+
+    # Reset hooks to clean state
+    original_jit_cache_hook = knobs.runtime.jit_cache_hook
+    original_jit_post_compile_hook = knobs.runtime.jit_post_compile_hook
+    original_launch_enter_hook = knobs.runtime.launch_enter_hook
+    original_compilation_listener = knobs.compilation.listener
+
+    knobs.runtime.jit_cache_hook = None
+    knobs.runtime.jit_post_compile_hook = None
+    knobs.runtime.launch_enter_hook = None
+    knobs.compilation.listener = None
+
+    return {
+        "original_cache_dir": original_cache_dir,
+        "original_always_compile": original_always_compile,
+        "original_jit_cache_hook": original_jit_cache_hook,
+        "original_jit_post_compile_hook": original_jit_post_compile_hook,
+        "original_launch_enter_hook": original_launch_enter_hook,
+        "original_compilation_listener": original_compilation_listener,
+    }
+
+
+def restore_triton_environment(original_settings):
+    """Restore original Triton environment settings"""
+    if original_settings["original_cache_dir"] is not None:
+        knobs.cache.dir = original_settings["original_cache_dir"]
 
-
+    knobs.compilation.always_compile = original_settings["original_always_compile"]
+    knobs.runtime.jit_cache_hook = original_settings["original_jit_cache_hook"]
+    knobs.runtime.jit_post_compile_hook = original_settings[
+        "original_jit_post_compile_hook"
+    ]
+    knobs.runtime.launch_enter_hook = original_settings["original_launch_enter_hook"]
+    knobs.compilation.listener = original_settings["original_compilation_listener"]
+
+
+def clear_all_caches(*kernels):
+    """
+    Clear all compilation caches comprehensively.
+
+    Args:
+        *kernels: Triton kernel objects to clear device caches for.
+            Can pass multiple kernels or none at all.
+
+    This function performs a comprehensive cache clearing operation:
+    1. Resets PyTorch compiler state (torch.compiler, dynamo, inductor)
+    2. Clears Triton kernel device caches and resets hashes for provided kernels
+    3. Creates a new Triton cache directory
+
+    Returns:
+        tuple: (new_cache_dir, original_cache_dir) for cleanup purposes
     """
-
+    print("\n=== Clearing all caches ===")
+
+    # Reset torch compiler state
+    torch.compiler.reset()
+    torch._dynamo.reset()
+    torch._inductor.metrics.reset()
+    print("✓ Reset torch compiler, dynamo, and inductor state")
+
+    # Clear Triton kernel device caches for all provided kernels
+    kernels_cleared = 0
+    for kernel in kernels:
+        if hasattr(kernel, "device_caches"):
+            for device_id in kernel.device_caches:
+                # device_caches[device_id] is a tuple of cache objects
+                device_cache_tuple = kernel.device_caches[device_id]
+                for cache_obj in device_cache_tuple:
+                    if hasattr(cache_obj, "clear"):
+                        cache_obj.clear()
+            kernel.hash = None  # Reset kernel hash to force recompilation
+            kernels_cleared += 1
+
+    if kernels_cleared > 0:
+        print(
+            f"✓ Cleared device caches and reset hashes for {kernels_cleared} kernel(s)"
+        )
+    else:
+        print("✓ No kernels provided for device cache clearing")
+
+    # Create a completely fresh cache directory
+    new_cache_dir = tempfile.mkdtemp(prefix="triton_fresh_cache_")
+    original_cache_dir = knobs.cache.dir
+    knobs.cache.dir = new_cache_dir
+    print(f"✓ Created fresh Triton cache directory: {new_cache_dir}")
+
+    return new_cache_dir, original_cache_dir
 
 
 class TestTritonparseCPU(unittest.TestCase):
```
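The helpers added above follow a save/patch/restore pattern around `triton.knobs`. A minimal sketch of how they compose outside of unittest (the import path and the workload line are illustrative; `knobs.cache.scope()` is used as a context manager, which matches how the tests drive it via `__enter__`/`__exit__`):

```python
from tests.test_tritonparse import (  # hypothetical import path, for illustration only
    create_fresh_triton_cache,
    restore_triton_environment,
    setup_fresh_triton_environment,
)

# Isolate Triton's cache directory and knobs around a block of work.
cache_dir, cache_scope = create_fresh_triton_cache()
with cache_scope:  # scope object returned by knobs.cache.scope()
    settings = setup_fresh_triton_environment(cache_dir)
    try:
        ...  # compile and launch Triton kernels against the fresh cache
    finally:
        restore_triton_environment(settings)  # put original hooks and cache dir back
```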
```diff
@@ -108,22 +203,53 @@ class TestTritonparseCUDA(unittest.TestCase):
 
         self.cuda_device = torch.device("cuda:0")
 
-        #
-        self.
-        self.
-        self.
-
+        # Set up fresh Triton cache environment
+        self.triton_cache_dir, self.cache_scope = create_fresh_triton_cache()
+        self.cache_scope.__enter__()  # Enter the cache scope context
+        self.original_triton_settings = setup_fresh_triton_environment(
+            self.triton_cache_dir
+        )
 
-        #
-
+        # Save original settings for restoration
+        self.prev_listener = knobs.compilation.listener
+        self.prev_always_compile = knobs.compilation.always_compile
+        self.prev_jit_post_compile_hook = knobs.runtime.jit_post_compile_hook
+        self.prev_launch_enter_hook = knobs.runtime.launch_enter_hook
 
     def tearDown(self):
         """Restore original triton settings"""
         # Always restore original settings, even if test fails
-
-
-
-
+        try:
+            # Restore Triton environment
+            restore_triton_environment(self.original_triton_settings)
+
+            # Exit cache scope and cleanup
+            self.cache_scope.__exit__(None, None, None)
+            if os.path.exists(self.triton_cache_dir):
+                shutil.rmtree(self.triton_cache_dir, ignore_errors=True)
+
+        except Exception as e:
+            print(f"Warning: Failed to cleanup Triton environment: {e}")
+
+    def setup_test_with_fresh_cache(self):
+        """Setup individual test with completely fresh cache"""
+        # Create a new cache directory for this specific test
+        test_cache_dir = tempfile.mkdtemp(prefix="triton_test_cache_")
+
+        # Save current cache dir and set new one
+        prev_cache_dir = knobs.cache.dir
+        knobs.cache.dir = test_cache_dir
+
+        return test_cache_dir, prev_cache_dir
+
+    def cleanup_test_cache(self, test_cache_dir, prev_cache_dir):
+        """Cleanup test-specific cache"""
+        # Restore previous cache dir
+        knobs.cache.dir = prev_cache_dir
+
+        # Cleanup test cache directory
+        if os.path.exists(test_cache_dir):
+            shutil.rmtree(test_cache_dir, ignore_errors=True)
 
     @unittest.skipUnless(torch.cuda.is_available(), "CUDA not available")
     def test_extract_python_source_info(self):
```
```diff
@@ -396,7 +522,7 @@ class TestTritonparseCUDA(unittest.TestCase):
 
         finally:
             # Clean up
-            if
+            if TEST_KEEP_OUTPUT:
                 print(
                     f"✓ Preserving temporary directory (TEST_KEEP_OUTPUT=1): {temp_dir}"
                 )
```
```diff
@@ -405,6 +531,222 @@ class TestTritonparseCUDA(unittest.TestCase):
                 print("✓ Cleaned up temporary directory")
             tritonparse.structured_logging.clear_logging_config()
 
+    @unittest.skipUnless(torch.cuda.is_available(), "CUDA not available")
+    def test_context_manager_with_split_compilations(self):
+        """Test TritonParseManager context manager with split_inductor_compilations parameter"""
+
+        # Setup fresh cache for this test (on top of the class-level fresh cache)
+        test_cache_dir, prev_cache_dir = self.setup_test_with_fresh_cache()
+
+        # Define Triton kernel
+        @triton.jit
+        def add_kernel(
+            a_ptr,
+            b_ptr,
+            c_ptr,
+            n_elements,
+            BLOCK_SIZE: tl.constexpr,
+        ):
+            pid = tl.program_id(axis=0)
+            block_start = pid * BLOCK_SIZE
+            offsets = block_start + tl.arange(0, BLOCK_SIZE)
+            mask = offsets < n_elements
+
+            a = tl.load(a_ptr + offsets, mask=mask)
+            b = tl.load(b_ptr + offsets, mask=mask)
+            c = a + b
+            tl.store(c_ptr + offsets, c, mask=mask)
+
+        def tensor_add_triton(a, b):
+            n_elements = a.numel()
+            c = torch.empty_like(a)
+            BLOCK_SIZE = 1024
+            grid = (triton.cdiv(n_elements, BLOCK_SIZE),)
+            add_kernel[grid](a, b, c, n_elements, BLOCK_SIZE)
+            return c
+
+        # Simple function for torch.compile (triggers inductor compilation)
+        def simple_add(a, b):
+            return a + b
+
+        # Prepare test data
+        torch.manual_seed(0)
+        size = (512, 512)
+        a = torch.randn(size, device=self.cuda_device, dtype=torch.float32)
+        b = torch.randn(size, device=self.cuda_device, dtype=torch.float32)
+
+        # Create temp directories for output
+        temp_output_dir_split_true = tempfile.mkdtemp()
+        temp_output_dir_split_false = tempfile.mkdtemp()
+
+        # Test 1: split_inductor_compilations=True
+        print("\n=== Testing split_inductor_compilations=True ===")
+        with tritonparse.context_manager.TritonParseManager(
+            enable_trace_launch=True,
+            split_inductor_compilations=True,
+            out=temp_output_dir_split_true,
+        ) as manager:
+            assert os.path.exists(manager.dir_path), "Temporary directory should exist"
+            print(f"Temporary directory created: {manager.dir_path}")
+
+            # Run Triton kernel
+            c_triton = tensor_add_triton(a, b)
+            c_triton.sum()
+            torch.compiler.reset()
+            with inductor_config.patch(force_disable_caches=True):
+                # Run torch.compile to trigger inductor compilation
+                compiled_add = torch.compile(simple_add)
+                c_compiled = compiled_add(a, b)
+                c_compiled.sum()
+
+            torch.cuda.synchronize()
+
+            # Verify log files are generated
+            log_files = os.listdir(manager.dir_path)
+            assert len(log_files) > 0, "Log files should be generated"
+            print(f"Generated {len(log_files)} log file(s)")
+        # After exiting context manager, verify behavior
+        # Verify parsed output exists
+        assert os.path.exists(
+            temp_output_dir_split_true
+        ), "Parsed output directory should exist"
+        print(f"Parsed output directory: {temp_output_dir_split_true}")
+
+        # Check output files for split=True
+        output_files_split_true = sorted(os.listdir(temp_output_dir_split_true))
+        num_files_split_true = len(output_files_split_true)
+        print(f"Output files (split=True): {num_files_split_true} files")
+        for f in output_files_split_true:
+            print(f" - {f}")
+
+        # === Clear caches between tests ===
+        second_test_cache_dir, original_cache_dir = clear_all_caches(add_kernel)
+
+        # Test 2: split_inductor_compilations=False
+        print("\n=== Testing split_inductor_compilations=False ===")
+        with tritonparse.context_manager.TritonParseManager(
+            enable_trace_launch=True,
+            split_inductor_compilations=False,
+            out=temp_output_dir_split_false,
+        ) as manager:
+            assert os.path.exists(manager.dir_path), "Temporary directory should exist"
+            print(f"Temporary directory created: {manager.dir_path}")
+
+            # Run the same operations
+            c_triton = tensor_add_triton(a, b)
+            c_triton.sum()
+            torch.compiler.reset()
+            with inductor_config.patch(force_disable_caches=True):
+                compiled_add = torch.compile(simple_add)
+                c_compiled = compiled_add(a, b)
+                c_compiled.sum()
+
+            torch.cuda.synchronize()
+
+            log_files = os.listdir(manager.dir_path)
+            assert len(log_files) > 0, "Log files should be generated"
+            print(f"Generated {len(log_files)} log file(s)")
+        # After exiting context manager, verify behavior
+        # Verify parsed output exists
+        assert os.path.exists(
+            temp_output_dir_split_false
+        ), "Parsed output directory should exist"
+        print(f"Parsed output directory: {temp_output_dir_split_false}")
+
+        # Check output files for split=False
+        output_files_split_false = sorted(os.listdir(temp_output_dir_split_false))
+        num_files_split_false = len(output_files_split_false)
+        print(f"Output files (split=False): {num_files_split_false} files")
+        for f in output_files_split_false:
+            print(f" - {f}")
+
+        # Check compilation events in parsed output for split=False
+        ndjson_gz_files_split_false = [
+            f for f in output_files_split_false if f.endswith(".ndjson.gz")
+        ]
+        assert (
+            len(ndjson_gz_files_split_false) > 0
+        ), "No .ndjson.gz files found in split=False parsed output"
+
+        compilation_count_split_false = 0
+        compilation_names_found = []
+        expected_compilation_names = {"add_kernel", "triton_poi_fused_add_0"}
+
+        for ndjson_gz_file in ndjson_gz_files_split_false:
+            ndjson_gz_path = os.path.join(temp_output_dir_split_false, ndjson_gz_file)
+            with gzip.open(ndjson_gz_path, "rt", encoding="utf-8") as f:
+                for line in f:
+                    try:
+                        event_data = json.loads(line.strip())
+                        if event_data.get("event_type") == "compilation":
+                            compilation_count_split_false += 1
+
+                            # Extract and validate the compilation name
+                            compilation_name = (
+                                event_data.get("payload", {})
+                                .get("metadata", {})
+                                .get("name")
+                            )
+                            if compilation_name:
+                                compilation_names_found.append(compilation_name)
+                                assert compilation_name in expected_compilation_names, (
+                                    f"Unexpected compilation name: '{compilation_name}'. "
+                                    f"Expected one of: {expected_compilation_names}"
+                                )
+                    except json.JSONDecodeError:
+                        continue
+
+        print(
+            f"Compilation events found (split=False): {compilation_count_split_false}"
+        )
+        print(f"Compilation names found: {compilation_names_found}")
+
+        assert (
+            compilation_count_split_false > 0
+        ), "Expected at least 1 compilation event in split=False output"
+
+        # Verify all compilation names are from the expected set
+        unique_names_found = set(compilation_names_found)
+        assert unique_names_found.issubset(expected_compilation_names), (
+            f"Found unexpected compilation names: {unique_names_found - expected_compilation_names}. "
+            f"Expected only: {expected_compilation_names}"
+        )
+        print(f"✓ All compilation names are valid: {unique_names_found}")
+
+        # Verify the key difference: split=False should have one fewer file
+        assert (
+            num_files_split_false == num_files_split_true - 1
+        ), f"split=False should have one fewer file (expected {num_files_split_true - 1}, got {num_files_split_false})"
+        print(
+            f"✓ Verified: split=False has {num_files_split_false} files, split=True has {num_files_split_true} files (difference: 1)"
+        )
+
+        # Clean up test outputs
+        try:
+            if TEST_KEEP_OUTPUT:
+                print(
+                    f"\n✓ Preserving output directories (TEST_KEEP_OUTPUT=1):\n split=True: {temp_output_dir_split_true}\n split=False: {temp_output_dir_split_false}"
+                )
+            else:
+                if os.path.exists(temp_output_dir_split_true):
+                    shutil.rmtree(temp_output_dir_split_true)
+                if os.path.exists(temp_output_dir_split_false):
+                    shutil.rmtree(temp_output_dir_split_false)
+                print("✓ Cleaned up output directories")
+        except Exception as e:
+            print(f"Warning: Failed to clean up output directories: {e}")
+
+        finally:
+            # Cleanup test-specific caches
+            self.cleanup_test_cache(test_cache_dir, prev_cache_dir)
+
+            # Cleanup second test cache directory
+            if "second_test_cache_dir" in locals():
+                knobs.cache.dir = original_cache_dir  # Restore cache dir first
+                if os.path.exists(second_test_cache_dir):
+                    shutil.rmtree(second_test_cache_dir, ignore_errors=True)
+                print(f"✓ Cleaned up second test cache: {second_test_cache_dir}")
+
     @unittest.skipUnless(torch.cuda.is_available(), "CUDA not available")
     def test_complex_kernels(self):
         """
```
```diff
@@ -702,7 +1044,7 @@ class TestTritonparseCUDA(unittest.TestCase):
 
         finally:
             # Clean up
-            if
+            if TEST_KEEP_OUTPUT:
                 print(
                     f"✓ Preserving temporary directory (TEST_KEEP_OUTPUT=1): {temp_dir}"
                 )
```
```diff
@@ -795,7 +1137,7 @@ class TestTritonparseCUDA(unittest.TestCase):
 
         finally:
             # Clean up
-            if
+            if TEST_KEEP_OUTPUT:
                 print(
                     f"✓ Preserving temporary directory (TEST_KEEP_OUTPUT=1): {temp_dir}"
                 )
```
```diff
@@ -916,7 +1258,7 @@ class TestTritonparseCUDA(unittest.TestCase):
         self.assertIn("Kernel execution finished.", proc.stdout)
 
         # Cleanup
-        if
+        if TEST_KEEP_OUTPUT:
            print(f"✓ Preserving temporary directory (TEST_KEEP_OUTPUT=1): {temp_dir}")
         else:
            shutil.rmtree(temp_dir)
```
{tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tritonparse/common.py (RENAMED)

```diff
@@ -262,6 +262,7 @@ def parse_logs(
     rank_config: RankConfig,
     verbose: bool = False,
     tritonparse_url_prefix: str = "",
+    split_inductor_compilations: bool = True,
 ) -> Tuple[str, dict]:
     """
     Parse logs.
@@ -271,7 +272,9 @@ def parse_logs(
         rank_config: Rank configuration
         verbose: Whether to print verbose information
         tritonparse_url_prefix: URL prefix for the generated file mapping
-
+        split_inductor_compilations: Whether to split
+            output files by frame_id, compile_id, attempt_id, and compiled_autograd_id.
+            Defaults to True. This rule follows tlparse's behavior.
     Returns:
         Tuple of (parsed log directory, file mapping)
     """
@@ -327,7 +330,7 @@ def parse_logs(
             relative_path = rank.to_string("")
             output_dir = os.path.join(parsed_log_dir, relative_path)
             # Parse the file
-            parse_single_file(input_file, output_dir)
+            parse_single_file(input_file, output_dir, split_inductor_compilations)
             # Collect generated files after parsing and gzip them immediately
             if os.path.exists(output_dir):
                 generated_files = []
```
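The new `split_inductor_compilations` flag is threaded from `parse_logs` into `parse_single_file`, and, judging from the new context manager below, it is also exposed through `unified_parse`. A rough sketch of the standalone flow, with the directory paths and the `out` argument chosen purely for illustration:

```python
import tritonparse.structured_logging
import tritonparse.utils

# Trace into a raw log directory, then parse it without splitting inductor
# compilations into per-(frame_id, compile_id, attempt_id, ...) output files.
tritonparse.structured_logging.init("./raw_logs", enable_trace_launch=True)
# ... run Triton kernels / torch.compile workloads here ...
tritonparse.utils.unified_parse(
    source="./raw_logs",
    out="./parsed_output",
    overwrite=True,
    split_inductor_compilations=False,
)
```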
tritonparse-0.2.4.dev20251004071438/tritonparse/context_manager.py (added)

```diff
@@ -0,0 +1,50 @@
+import os
+import shutil
+import tempfile
+
+from tritonparse.shared_vars import TEST_KEEP_OUTPUT
+
+from tritonparse.structured_logging import clear_logging_config, init
+from tritonparse.utils import unified_parse
+
+
+def createUniqueTempDirectory():
+    return tempfile.mkdtemp()
+
+
+class TritonParseManager:
+    def __init__(
+        self,
+        enable_trace_launch=False,
+        split_inductor_compilations=True,
+        **parse_kwargs,
+    ):
+        """
+        Context manager for tritonparse workflow.
+
+        Args:
+            enable_trace_launch: Whether to enable trace launch
+            split_inductor_compilations: Whether to split inductor compilations in the output
+            **parse_kwargs: Additional keyword arguments to pass to unified_parse
+        """
+        self.enable_trace_launch = enable_trace_launch
+        self.split_inductor_compilations = split_inductor_compilations
+        self.parse_kwargs = parse_kwargs
+        self.dir_path = None
+        self.output_link = None
+
+    def __enter__(self):
+        self.dir_path = createUniqueTempDirectory()
+        init(self.dir_path, enable_trace_launch=self.enable_trace_launch)
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.output_link = unified_parse(
+            source=self.dir_path,
+            overwrite=True,
+            split_inductor_compilations=self.split_inductor_compilations,
+            **self.parse_kwargs,
+        )
+        clear_logging_config()
+        if os.path.exists(self.dir_path) and not TEST_KEEP_OUTPUT:
+            shutil.rmtree(self.dir_path)
```
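A minimal usage sketch of the new `TritonParseManager` (the output directory name is illustrative; any Triton or `torch.compile` workload can run inside the block):

```python
import tritonparse.context_manager

with tritonparse.context_manager.TritonParseManager(
    enable_trace_launch=True,
    split_inductor_compilations=False,
    out="./parsed_output",  # forwarded to unified_parse via **parse_kwargs
) as manager:
    print(f"raw trace logs go to {manager.dir_path}")
    # ... launch Triton kernels / run torch.compile'd functions ...

# On exit the manager parses the trace and records the result location.
print(f"parsed output: {manager.output_link}")
```

The orchestrator.py change below routes all reproducer template substitution through a `PlaceholderReplacer`, so callers can inject their own. A sketch of a custom replacer follows; the subclassing approach, the `BannerReplacer` name, and the argument values are illustrative, since this diff only shows `reproduce(..., replacer=...)` and `replacer.replace(template_code, context_bundle, temp_json_path=...)`:

```python
from tritonparse.reproducer.orchestrator import reproduce
from tritonparse.reproducer.placeholder_replacer import DefaultPlaceholderReplacer


class BannerReplacer(DefaultPlaceholderReplacer):
    """Hypothetical replacer: reuse the default substitutions, then prepend a banner."""

    def replace(self, template_code, context_bundle, **kwargs):
        code = super().replace(template_code, context_bundle, **kwargs)
        return "# auto-generated reproducer\n" + code


paths = reproduce(
    input_path="trace.ndjson",  # NDJSON trace produced by tritonparse
    line_index=0,               # launch event to reproduce (illustrative)
    out_dir="repro_out",
    template="example",         # assumed template name, matching templates/example.py
    replacer=BannerReplacer(),
)
```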
{tritonparse-0.2.4.dev20251003071457 → tritonparse-0.2.4.dev20251004071438}/tritonparse/reproducer/orchestrator.py

```diff
@@ -1,13 +1,13 @@
 from pathlib import Path
+from typing import Optional
 
 from tritonparse.reproducer.ingestion.ndjson import build_context_bundle
-from tritonparse.reproducer.
-
-
-    _generate_invocation_snippet,
-    _parse_kernel_signature,
-    determine_output_paths,
+from tritonparse.reproducer.placeholder_replacer import (
+    DefaultPlaceholderReplacer,
+    PlaceholderReplacer,
 )
+from tritonparse.reproducer.templates.loader import load_template_code
+from tritonparse.reproducer.utils import determine_output_paths
 
 from tritonparse.tools.prettify_ndjson import load_ndjson, save_prettified_json
 from tritonparse.tp_logger import logger
@@ -18,6 +18,7 @@ def reproduce(
     line_index: int,
     out_dir: str,
     template: str,
+    replacer: Optional[PlaceholderReplacer] = None,
 ) -> dict[str, Path]:
     """
     Generate a reproducer script from NDJSON trace file.
@@ -26,6 +27,8 @@ def reproduce(
         input_path: Path to the NDJSON trace file.
         line_index: Line index of the launch event to reproduce.
         out_dir: Output directory for reproducer files.
+        template: Template name to use for the reproducer.
+        replacer: Optional custom PlaceholderReplacer instance. If None, uses DefaultPlaceholderReplacer.
     """
     logger.debug(f"Building bundle from {input_path} at line {line_index}")
     events = load_ndjson(Path(input_path))
@@ -42,18 +45,15 @@ def reproduce(
     save_prettified_json(context_bundle.raw_launch_event, temp_json_path)
     logger.debug("Loading reproducer template.")
     template_code = load_template_code(template)
-
-
-
-
-
-    final_code =
-
-    pos_args, kw_args = _parse_kernel_signature(source_code)
-    invocation_snippet = _generate_invocation_snippet(pos_args, kw_args)
-    final_code = final_code.replace(
-        "# {{KERNEL_INVOCATION_PLACEHOLDER}}", invocation_snippet
+
+    # Use PlaceholderReplacer to replace all placeholders
+    # If no custom replacer provided, use the default one
+    if replacer is None:
+        replacer = DefaultPlaceholderReplacer()
+    final_code = replacer.replace(
+        template_code, context_bundle, temp_json_path=temp_json_path
     )
+
     out_py_path.write_text(final_code, encoding="utf-8")
 
     filepath = context_bundle.kernel_info.file_path
```
|
|
59
59
|
filepath = context_bundle.kernel_info.file_path
|