tritonparse 0.2.4.dev20250923071613__tar.gz → 0.2.4.dev20250925071528__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/.gitignore +1 -1
- {tritonparse-0.2.4.dev20250923071613/tritonparse.egg-info → tritonparse-0.2.4.dev20250925071528}/PKG-INFO +1 -1
- tritonparse-0.2.4.dev20250925071528/run.py +48 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/tests/test_tritonparse.py +117 -0
- tritonparse-0.2.4.dev20250925071528/tritonparse/reproducer/cli.py +27 -0
- tritonparse-0.2.4.dev20250925071528/tritonparse/reproducer/ingestion/ndjson.py +235 -0
- tritonparse-0.2.4.dev20250925071528/tritonparse/reproducer/orchestrator.py +63 -0
- tritonparse-0.2.4.dev20250923071613/tritonparse/reproducer/utils.py → tritonparse-0.2.4.dev20250925071528/tritonparse/reproducer/templates/example.py +113 -21
- tritonparse-0.2.4.dev20250925071528/tritonparse/reproducer/templates/loader.py +57 -0
- tritonparse-0.2.4.dev20250925071528/tritonparse/reproducer/utils.py +302 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/tritonparse/tools/prettify_ndjson.py +8 -6
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/tritonparse/utils.py +9 -20
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528/tritonparse.egg-info}/PKG-INFO +1 -1
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/tritonparse.egg-info/SOURCES.txt +5 -0
- tritonparse-0.2.4.dev20250923071613/run.py +0 -14
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/.ci/README.md +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/.ci/install-project.sh +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/.ci/install-triton-kernels.sh +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/.ci/install-triton.sh +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/.ci/run-tests.sh +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/.ci/setup.sh +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/.github/PAGES_SETUP.md +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/.github/copilot-instructions.md +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/.github/workflows/deploy-pages-standalone.yml +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/.github/workflows/deploy-pages.yml +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/.github/workflows/nightly-pypi.yml +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/.github/workflows/test.yml +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/CHANGELOG.md +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/CODE_OF_CONDUCT.md +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/CONTRIBUTING.md +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/LICENSE +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/Makefile +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/README.md +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/__init__.py +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/docs/README.md +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/docs/screenshots/code-comparison.png +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/docs/screenshots/kernel-overview.png +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/pyproject.toml +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/setup.cfg +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/tests/README.md +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/tests/__init__.py +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/tests/example_output/logs/dedicated_log_triton_trace_findhao_.ndjson +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/tests/example_output/parsed_output/dedicated_log_triton_trace_findhao__mapped.ndjson.gz +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/tests/example_output/parsed_output/f0_fc0_a0_cai-.ndjson.gz +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/tests/example_output/parsed_output/log_file_list.json +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/tests/example_output/parsed_output_complex/dedicated_log_triton_trace_findhao__mapped.ndjson.gz +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/tests/example_output/parsed_output_complex/log_file_list.json +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/tests/example_output/repro/repro_context_20250816192455.json +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/tests/test_add.py +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/tritonparse/__init__.py +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/tritonparse/common.py +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/tritonparse/event_diff.py +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/tritonparse/extract_source_mappings.py +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/tritonparse/ir_parser.py +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/tritonparse/mapper.py +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/tritonparse/reproducer/__init__.py +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/tritonparse/shared_vars.py +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/tritonparse/source_type.py +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/tritonparse/sourcemap_utils.py +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/tritonparse/structured_logging.py +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/tritonparse/tools/__init__.py +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/tritonparse/tools/decompress_bin_ndjson.py +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/tritonparse/tools/format_fix.py +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/tritonparse/tools/load_tensor.py +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/tritonparse/tools/readme.md +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/tritonparse/tp_logger.py +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/tritonparse/trace_processor.py +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/tritonparse.egg-info/dependency_links.txt +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/tritonparse.egg-info/requires.txt +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/tritonparse.egg-info/top_level.txt +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/eslint.config.js +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/index.html +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/package-lock.json +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/package.json +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/postcss.config.js +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/public/dedicated_log_triton_trace_findhao__mapped.ndjson.gz +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/public/f0_fc0_a0_cai-.ndjson +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/public/favicon.ico +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/public/logo.svg +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/scripts/inline-html.js +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/src/App.css +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/src/App.tsx +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/src/assets/react.svg +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/src/components/ArgumentViewer.tsx +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/src/components/Callstack.tsx +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/src/components/CodeComparisonView.tsx +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/src/components/CodeViewer.tsx +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/src/components/CompilationInfo.tsx +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/src/components/CopyCodeButton.tsx +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/src/components/DataSourceSelector.tsx +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/src/components/DiffComparisonView.tsx +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/src/components/DiffViewer.tsx +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/src/components/ExternalLink.tsx +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/src/components/SingleCodeViewer.tsx +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/src/components/StackDiffViewer.tsx +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/src/components/ToggleSwitch.tsx +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/src/components/TritonIRs.tsx +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/src/components/WelcomeScreen.tsx +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/src/context/FileDiffSession.tsx +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/src/index.css +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/src/main.tsx +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/src/pages/CodeView.tsx +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/src/pages/FileDiffView.tsx +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/src/pages/KernelOverview.tsx +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/src/utils/dataLoader.ts +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/src/utils/fbDetection.ts +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/src/utils/safeImport.ts +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/src/utils/tensor.ts +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/src/vite-env.d.ts +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/tailwind.config.js +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/tsconfig.app.json +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/tsconfig.json +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/tsconfig.node.json +0 -0
- {tritonparse-0.2.4.dev20250923071613 → tritonparse-0.2.4.dev20250925071528}/website/vite.config.ts +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: tritonparse
-Version: 0.2.4.dev20250923071613
+Version: 0.2.4.dev20250925071528
 Summary: TritonParse: A Compiler Tracer, Visualizer, and mini-Reproducer Generator for Triton Kernels
 Author-email: Yueming Hao <yhao@meta.com>
 License-Expression: BSD-3-Clause
tritonparse-0.2.4.dev20250925071528/run.py (new file)
@@ -0,0 +1,48 @@
+#!/usr/bin/env python3
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+
+import argparse
+
+from tritonparse.reproducer.cli import _add_reproducer_args
+from tritonparse.reproducer.orchestrator import reproduce
+
+from tritonparse.utils import _add_parse_args, unified_parse
+
+
+# We need this as an entrace for fbpkg
+def main():
+    parser = argparse.ArgumentParser(description="tritonparse CLI")
+    subparsers = parser.add_subparsers(dest="command", required=True)
+
+    parse_parser = subparsers.add_parser(
+        "parse", help="Parse triton structured logs", conflict_handler="resolve"
+    )
+    _add_parse_args(parse_parser)
+    parse_parser.set_defaults(func="parse")
+
+    repro_parser = subparsers.add_parser(
+        "reproduce", help="Build reproducer from trace file"
+    )
+    _add_reproducer_args(repro_parser)
+    repro_parser.set_defaults(func="reproduce")
+
+    args = parser.parse_args()
+
+    if args.func == "parse":
+        # Filter out routing-specific arguments before passing to unified_parse
+        parse_args = {
+            k: v for k, v in vars(args).items() if k not in ["command", "func"]
+        }
+        unified_parse(**parse_args)
+    elif args.func == "reproduce":
+        reproduce(
+            input_path=args.input,
+            line_index=args.line_index,
+            out_dir=args.out_dir,
+            template=args.template,
+        )
+
+
+if __name__ == "__main__":
+    # Do not add code here, it won't be run. Add them to the function called below.
+    main()  # pragma: no cover
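The new run.py wires both subcommands into a single entry point. A hedged sketch of how it might be invoked from a shell (the trace path and line index are hypothetical; the options accepted by `parse` are whatever `_add_parse_args` in tritonparse.utils registers):

python run.py parse <logs_dir_or_trace_file> [options registered by _add_parse_args]
python run.py reproduce ./logs/trace.ndjson --line-index 42 --out-dir repro_output --template example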
tests/test_tritonparse.py
@@ -33,6 +33,7 @@ from triton.compiler import ASTSource, IRSource
 
 # @manual=//triton:triton
 from triton.knobs import CompileTimes
+from tritonparse.common import is_fbcode
 from tritonparse.structured_logging import convert, extract_python_source_info
 
 HAS_TRITON_KERNELS = importlib.util.find_spec("triton_kernels") is not None
@@ -706,6 +707,122 @@ class TestTritonparseCUDA(unittest.TestCase):
             print("✓ Cleaned up temporary directory")
         tritonparse.structured_logging.clear_logging_config()
 
+    @unittest.skipIf(is_fbcode(), "Skip in internal FB environment")
+    @unittest.skipUnless(torch.cuda.is_available(), "CUDA not available")
+    def test_reproducer_end_to_end(self):
+        """End-to-end test for reproducer: generate logs, build script, run it."""
+        import subprocess as _subprocess
+        import sys as _sys
+        from pathlib import Path as _Path
+
+        # 1) Prepare temp dirs
+        temp_dir = tempfile.mkdtemp()
+        logs_dir = os.path.join(temp_dir, "logs")
+        out_dir = os.path.join(temp_dir, "repro_output")
+        os.makedirs(logs_dir, exist_ok=True)
+        os.makedirs(out_dir, exist_ok=True)
+
+        # 2) Write a simple module-level Triton kernel to a temp file
+        kernel_dir = os.path.join(temp_dir, "kernels")
+        os.makedirs(kernel_dir, exist_ok=True)
+        kernel_file = os.path.join(kernel_dir, "simple_kernel.py")
+        kernel_src = (
+            "import triton\n"
+            "import triton.language as tl\n"
+            "import torch\n"
+            "\n"
+            "@triton.jit\n"
+            "def add_kernel(x_ptr, y_ptr, out_ptr, n_elements, BLOCK_SIZE: tl.constexpr):\n"
+            "    pid = tl.program_id(axis=0)\n"
+            "    block_start = pid * BLOCK_SIZE\n"
+            "    offsets = block_start + tl.arange(0, BLOCK_SIZE)\n"
+            "    mask = offsets < n_elements\n"
+            "    x = tl.load(x_ptr + offsets, mask=mask)\n"
+            "    y = tl.load(y_ptr + offsets, mask=mask)\n"
+            "    tl.store(out_ptr + offsets, x + y, mask=mask)\n"
+        )
+        with open(kernel_file, "w", encoding="utf-8") as f:
+            f.write(kernel_src)
+
+        # 3) Generate logs by running the kernel once
+        tritonparse.structured_logging.init(logs_dir, enable_trace_launch=True)
+        try:
+            if kernel_dir not in _sys.path:
+                _sys.path.insert(0, kernel_dir)
+            import importlib as _importlib
+
+            mod = _importlib.import_module("simple_kernel")
+            device = torch.device("cuda:0")
+            torch.manual_seed(0)
+            n = 256
+            x = torch.randn((n,), device=device, dtype=torch.float32)
+            y = torch.randn((n,), device=device, dtype=torch.float32)
+            out = torch.empty_like(x)
+            BLOCK_SIZE = 64
+            grid = (triton.cdiv(n, BLOCK_SIZE),)
+            mod.add_kernel[grid](x, y, out, n, BLOCK_SIZE)
+            torch.cuda.synchronize()
+        finally:
+            tritonparse.structured_logging.clear_logging_config()
+
+        # 4) Find the NDJSON and compute launch event index
+        ndjson_files = [
+            os.path.join(logs_dir, f)
+            for f in os.listdir(logs_dir)
+            if f.endswith(".ndjson")
+        ]
+        assert ndjson_files, f"No ndjson found in {logs_dir}"
+        ndjson_path = max(ndjson_files, key=os.path.getmtime)
+
+        from tritonparse.tools.prettify_ndjson import load_ndjson as _load_ndjson
+
+        events = _load_ndjson(_Path(ndjson_path), save_irs=True)
+        launch_indices = [
+            i for i, ev in enumerate(events) if ev.get("event_type") == "launch"
+        ]
+        assert launch_indices, "No launch event found in ndjson"
+        line_index = launch_indices[0]
+
+        # 5) Build reproducer
+        from tritonparse.reproducer.orchestrator import reproduce
+
+        reproduce(
+            input_path=ndjson_path,
+            line_index=line_index,
+            out_dir=out_dir,
+            template="example",
+        )
+
+        # 6) Locate generated script and context under out_dir/add_kernel/
+        kernel_out_dir = os.path.join(out_dir, "add_kernel")
+        assert os.path.isdir(
+            kernel_out_dir
+        ), f"Kernel output dir not found: {kernel_out_dir}"
+        gen_scripts = [f for f in os.listdir(kernel_out_dir) if f.endswith(".py")]
+        gen_jsons = [f for f in os.listdir(kernel_out_dir) if f.endswith(".json")]
+        assert gen_scripts, f"No generated script in {kernel_out_dir}"
+        assert gen_jsons, f"No generated context json in {kernel_out_dir}"
+        script_path = os.path.join(kernel_out_dir, sorted(gen_scripts)[-1])
+
+        # 7) Execute generated script and assert success output
+        proc = _subprocess.run(
+            [
+                _sys.executable,
+                script_path,
+            ],
+            check=True,
+            capture_output=True,
+            text=True,
+        )
+        self.assertIn("Kernel execution finished.", proc.stdout)
+
+        # Cleanup
+        if should_keep_output():
+            print(f"✓ Preserving temporary directory (TEST_KEEP_OUTPUT=1): {temp_dir}")
+        else:
+            shutil.rmtree(temp_dir)
+            print("✓ Cleaned up temporary directory")
+
 
 if __name__ == "__main__":
     unittest.main()
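Assuming a CUDA-capable machine, the new test can be targeted directly with the standard unittest CLI; setting TEST_KEEP_OUTPUT=1 preserves the temporary directory, as the cleanup branch above suggests (this invocation is an assumption, not taken from the package docs):

TEST_KEEP_OUTPUT=1 python -m unittest tests.test_tritonparse.TestTritonparseCUDA.test_reproducer_end_to_end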
tritonparse-0.2.4.dev20250925071528/tritonparse/reproducer/cli.py (new file)
@@ -0,0 +1,27 @@
+import argparse
+
+
+def _add_reproducer_args(parser: argparse.ArgumentParser) -> None:
+    """Add common arguments for the reproducer to a parser."""
+    parser.add_argument("input", help="Path to the ndjson/ndjson.gz log file")
+    parser.add_argument(
+        "--line-index",
+        type=int,
+        help="The line number of the launch event in the input file to reproduce.",
+    )
+    parser.add_argument(
+        "--out-dir",
+        default="repro_output",
+        help=(
+            "Directory to save the reproducer script and context JSON. Defaults to "
+            "'repro_output/<kernel_name>/' if not provided."
+        ),
+    )
+    parser.add_argument(
+        "--template",
+        default="example",
+        help=(
+            "Template name (builtin, without .py) or a filesystem path to a .py file. "
+            "Defaults to 'example'."
+        ),
+    )
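For illustration, a minimal sketch of how these arguments resolve once attached to a parser, mirroring what run.py does; the trace file name and index below are made up:

import argparse

from tritonparse.reproducer.cli import _add_reproducer_args

parser = argparse.ArgumentParser(description="reproducer arguments demo")
_add_reproducer_args(parser)

# --out-dir and --template fall back to "repro_output" and "example" when omitted.
args = parser.parse_args(["trace.ndjson", "--line-index", "3"])
print(args.input, args.line_index, args.out_dir, args.template)
# -> trace.ndjson 3 repro_output example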
tritonparse-0.2.4.dev20250925071528/tritonparse/reproducer/ingestion/ndjson.py (new file)
@@ -0,0 +1,235 @@
+from dataclasses import dataclass
+from typing import Any, Dict, List, Optional, Tuple
+
+from tritonparse.tp_logger import logger
+
+# Sentinel object to mark arguments that should be skipped during processing
+_SKIP = object()
+
+
+@dataclass
+class KernelInfo:
+    """Information about a Triton kernel extracted from compilation events."""
+
+    file_path: str
+    function_name: str
+    source_code: str
+    call_stack: List[Dict[str, Any]]
+
+
+@dataclass
+class ContextBundle:
+    """Bundle of all context information needed to reproduce a kernel launch."""
+
+    kernel_info: KernelInfo
+    compile: Dict[str, Any]
+    launch: Dict[str, Any]
+    args: Dict[str, Any]
+    tensor_args: Dict[str, Any]
+    raw_launch_event: Dict[str, Any]
+    raw_comp_event: Dict[str, Any]
+
+
+def get_launch_and_compilation_events(
+    events: List[Dict[str, Any]], line_index: Optional[int] = None
+) -> Tuple[Dict[str, Any], Dict[str, Any]]:
+    """
+    Extract launch and compilation events from the event list.
+
+    Args:
+        events: List of parsed event dictionaries.
+        line_index: Index of the launch event to process.
+
+    Returns:
+        Tuple of (launch_event, compilation_event).
+
+    Raises:
+        ValueError: If the event at line_index is not a launch event.
+        RuntimeError: If compilation event cannot be found or is ambiguous.
+    """
+    if line_index is None or line_index >= len(events):
+        raise ValueError(f"Invalid line_index: {line_index}")
+
+    launch_event = events[line_index]
+    if launch_event["event_type"] != "launch":
+        raise ValueError(f"Event at index {line_index} is not a launch event")
+
+    comp_meta = launch_event.get("compilation_metadata", {})
+    comp_hash = comp_meta.get("hash")
+    if not comp_hash:
+        raise RuntimeError("Could not find compilation hash in launch event.")
+
+    comp_event = None
+    for event in events:
+        if (
+            event["event_type"] == "compilation"
+            and event.get("payload", {}).get("metadata", {}).get("hash") == comp_hash
+        ):
+            comp_event = event
+            break
+    if not comp_event:
+        raise RuntimeError(f"Could not find compilation event for hash {comp_hash}.")
+    return launch_event, comp_event
+
+
+def get_kernel_info(comp_event: Dict[str, Any]) -> KernelInfo:
+    """
+    Extract kernel information from a compilation event.
+
+    Args:
+        comp_event: Compilation event dictionary containing kernel metadata.
+
+    Returns:
+        KernelInfo object with extracted kernel details.
+
+    Raises:
+        RuntimeError: If file path or function name cannot be resolved.
+    """
+    payload = comp_event.get("payload") or {}
+    py_source = payload.get("python_source") or {}
+    code = py_source.get("code", "")
+
+    # Extract file path and function name
+    file_path = py_source.get("file_path")
+    # The function name is in the compilation metadata payload
+    func_name = (comp_event.get("payload", {}).get("metadata") or {}).get("name")
+
+    # Find '@triton.jit' decorator and slice the string from there
+    jit_marker = "@triton.jit"
+    jit_pos = code.find(jit_marker)
+    if jit_pos != -1:
+        code = code[jit_pos:]
+        logger.debug("Extracted kernel source starting from '@triton.jit'.")
+
+    if not file_path or not func_name:
+        raise RuntimeError(
+            "Could not resolve kernel file path or function name from compilation event."
+            " The import-based strategy cannot proceed."
+        )
+    return KernelInfo(file_path, func_name, code, comp_event.get("stack", []))
+
+
+def _decode_arg(raw: Any) -> Any:
+    """
+    Decode a raw argument value from event data.
+
+    Args:
+        raw: Raw argument value from event data.
+
+    Returns:
+        Decoded argument value, or _SKIP sentinel for tensors.
+    """
+    if not isinstance(raw, dict):
+        return raw
+    t = raw.get("type")
+    if t == "tensor":
+        return _SKIP
+    if t == "NoneType":
+        return None
+    return raw.get("value", raw.get("repr"))
+
+
+def _pack_args(args: Dict[str, Any]) -> Dict[str, Any]:
+    """
+    Pack argument values into a standardized format.
+
+    Args:
+        args: Dictionary of argument names to values.
+
+    Returns:
+        Dictionary with packed argument information including type and metadata.
+    """
+    packed = {}
+    for k, v in args.items():
+        t = v.get("type") if isinstance(v, dict) else None
+        if t == "tensor":
+            packed[k] = {
+                "type": "tensor",
+                "shape": v.get("shape") if isinstance(v, dict) else None,
+                "dtype": v.get("dtype") if isinstance(v, dict) else None,
+                "device": v.get("device") if isinstance(v, dict) else None,
+                "stride": v.get("stride") if isinstance(v, dict) else None,
+                "is_contiguous": (
+                    v.get("is_contiguous") if isinstance(v, dict) else None
+                ),
+                "numel": v.get("numel") if isinstance(v, dict) else None,
+            }
+        else:
+            # scalar / NoneType etc
+            if isinstance(v, dict):
+                packed[k] = {
+                    "type": v.get("type"),
+                    "value": v.get("value", v.get("repr")),
+                }
+            else:
+                packed[k] = {
+                    "type": None,
+                    "value": v,
+                }
+    return packed
+
+
+def build_context_bundle(
+    events: List[Dict[str, Any]], line_index: Optional[int] = None
+):
+    """
+    Build a complete context bundle from events and line index.
+
+    Args:
+        events: List of parsed event dictionaries.
+        line_index: Index of the launch event to process.
+
+    Returns:
+        ContextBundle containing all information needed to reproduce the kernel launch.
+
+    Raises:
+        ValueError: If line_index is invalid or event is not a launch event.
+        RuntimeError: If compilation event cannot be found.
+    """
+    launch_event, comp_event = get_launch_and_compilation_events(events, line_index)
+    kernel_info = get_kernel_info(comp_event)
+    grid = launch_event.get("grid")
+    extracted_args = launch_event.get("extracted_args", {})
+    comp_meta = launch_event.get("compilation_metadata", {})
+
+    # Compile metadata subset we care about
+    compile_block = {
+        "num_warps": comp_meta.get("num_warps"),
+        "num_stages": comp_meta.get("num_stages"),
+        "arch": comp_meta.get("arch"),
+        "backend": comp_meta.get("backend_name") or comp_meta.get("backend"),
+        "triton_version": comp_meta.get("triton_version"),
+        "hash": comp_meta.get("hash"),
+    }
+
+    # kwargs: include constexpr + explicit scalars used for launch (skip tensor args)
+    kwargs = {}
+    for k, v in extracted_args.items():
+        val = _decode_arg(v)
+        if val is _SKIP:
+            continue
+        kwargs[k] = val
+
+    # tensor args: only tensors
+    raw_tensor_args = {
+        k: v
+        for k, v in extracted_args.items()
+        if isinstance(v, dict) and v.get("type") == "tensor"
+    }
+
+    primitive_args = _pack_args(extracted_args)
+    tensor_args = _pack_args(raw_tensor_args)
+    launch_block = {
+        "grid": grid,
+        "kwargs": kwargs,
+    }
+
+    return ContextBundle(
+        kernel_info,
+        compile_block,
+        launch_block,
+        primitive_args,
+        tensor_args,
+        launch_event,
+        comp_event,
+    )
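To make the expected event shapes concrete, here is a toy, hand-written pair of events fed through build_context_bundle; real NDJSON events carry many more fields, and every value below is invented for illustration:

from tritonparse.reproducer.ingestion.ndjson import build_context_bundle

comp_event = {
    "event_type": "compilation",
    "payload": {
        "metadata": {"hash": "abc123", "name": "add_kernel"},
        "python_source": {
            "file_path": "/tmp/simple_kernel.py",  # hypothetical kernel file
            "code": "@triton.jit\ndef add_kernel(x_ptr, y_ptr, out_ptr, n_elements, BLOCK_SIZE: tl.constexpr): ...",
        },
    },
    "stack": [],
}
launch_event = {
    "event_type": "launch",
    "compilation_metadata": {"hash": "abc123", "num_warps": 4, "num_stages": 3},
    "grid": [4, 1, 1],
    "extracted_args": {
        "x_ptr": {"type": "tensor", "shape": [256], "dtype": "torch.float32", "device": "cuda:0"},
        "n_elements": {"type": "int", "value": 256},
        "BLOCK_SIZE": {"type": "int", "value": 64},
    },
}

# The launch event sits at index 1 of this two-event list.
bundle = build_context_bundle([comp_event, launch_event], line_index=1)
print(bundle.kernel_info.function_name)  # -> "add_kernel"
print(bundle.launch["kwargs"])           # tensors are skipped; scalars/constexprs are kept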
tritonparse-0.2.4.dev20250925071528/tritonparse/reproducer/orchestrator.py (new file)
@@ -0,0 +1,63 @@
+from pathlib import Path
+
+from tritonparse.reproducer.ingestion.ndjson import build_context_bundle
+from tritonparse.reproducer.templates.loader import load_template_code
+from tritonparse.reproducer.utils import (
+    _generate_import_statements,
+    _generate_invocation_snippet,
+    _parse_kernel_signature,
+    determine_output_paths,
+)
+
+from tritonparse.tools.prettify_ndjson import load_ndjson, save_prettified_json
+from tritonparse.tp_logger import logger
+
+
+def reproduce(
+    input_path: str,
+    line_index: int,
+    out_dir: str,
+    template: str,
+):
+    """
+    Generate a reproducer script from NDJSON trace file.
+
+    Args:
+        input_path: Path to the NDJSON trace file.
+        line_index: Line index of the launch event to reproduce.
+        out_dir: Output directory for reproducer files.
+    """
+    logger.debug(f"Building bundle from {input_path} at line {line_index}")
+    events = load_ndjson(Path(input_path), save_irs=True)
+    logger.debug(f"Loaded {len(events)} events")
+
+    # Build context bundle from the specified launch event
+    context_bundle = build_context_bundle(events, line_index)
+    logger.debug(
+        f"Built context bundle for kernel: {context_bundle.kernel_info.function_name}"
+    )
+    out_py_path, temp_json_path = determine_output_paths(
+        out_dir, context_bundle.kernel_info.function_name
+    )
+    save_prettified_json(context_bundle.raw_launch_event, temp_json_path)
+    logger.debug("Loading reproducer template.")
+    template_code = load_template_code(template)
+    final_code = template_code.replace(
+        "{{JSON_FILE_NAME_PLACEHOLDER}}", temp_json_path.name
+    )
+    sys_stmt, import_statement = _generate_import_statements(context_bundle.kernel_info)
+    final_code = final_code.replace("# {{KERNEL_SYSPATH_PLACEHOLDER}}", sys_stmt)
+    final_code = final_code.replace("# {{KERNEL_IMPORT_PLACEHOLDER}}", import_statement)
+    source_code = context_bundle.kernel_info.source_code
+    pos_args, kw_args = _parse_kernel_signature(source_code)
+    invocation_snippet = _generate_invocation_snippet(pos_args, kw_args)
+    final_code = final_code.replace(
+        "# {{KERNEL_INVOCATION_PLACEHOLDER}}", invocation_snippet
+    )
+    out_py_path.write_text(final_code, encoding="utf-8")
+    logger.info(
+        "REPRODUCER_OUTPUT script=%s json=%s kernel=%s",
+        str(out_py_path.resolve()),
+        str(temp_json_path.resolve()),
+        context_bundle.kernel_info.function_name,
+    )
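The same pipeline can also be driven programmatically, mirroring both run.py and the end-to-end test above; the trace path and line index here are placeholders, not values from the package:

from tritonparse.reproducer.orchestrator import reproduce

reproduce(
    input_path="./logs/trace.ndjson",  # hypothetical NDJSON trace written by tritonparse.structured_logging
    line_index=7,                      # index of a "launch" event in that file
    out_dir="repro_output",            # script and context JSON land under repro_output/<kernel_name>/
    template="example",                # built-in template under tritonparse/reproducer/templates/
)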