tritonparse-0.3.2.dev20251210071601-py3-none-any.whl

This diff shows the content of publicly released package versions as they appear in their public registries and is provided for informational purposes only.

Potentially problematic release.
Files changed (62)
  1. tritonparse/__init__.py +0 -0
  2. tritonparse/__main__.py +7 -0
  3. tritonparse/cli.py +110 -0
  4. tritonparse/common.py +409 -0
  5. tritonparse/context_manager.py +64 -0
  6. tritonparse/event_diff.py +122 -0
  7. tritonparse/extract_source_mappings.py +49 -0
  8. tritonparse/info/__init__.py +30 -0
  9. tritonparse/info/cli.py +121 -0
  10. tritonparse/info/kernel_query.py +209 -0
  11. tritonparse/info/parse_helper.py +70 -0
  12. tritonparse/ir_analysis.py +427 -0
  13. tritonparse/ir_parser.py +365 -0
  14. tritonparse/mapper.py +102 -0
  15. tritonparse/reproducer/__init__.py +0 -0
  16. tritonparse/reproducer/ast_analyzer.py +636 -0
  17. tritonparse/reproducer/cli.py +72 -0
  18. tritonparse/reproducer/consolidated_result.py +52 -0
  19. tritonparse/reproducer/function_extractor.py +228 -0
  20. tritonparse/reproducer/import_info.py +25 -0
  21. tritonparse/reproducer/import_parser.py +178 -0
  22. tritonparse/reproducer/import_resolver.py +151 -0
  23. tritonparse/reproducer/ingestion/ndjson.py +237 -0
  24. tritonparse/reproducer/multi_file_analyzer.py +824 -0
  25. tritonparse/reproducer/orchestrator.py +110 -0
  26. tritonparse/reproducer/placeholder_replacer.py +335 -0
  27. tritonparse/reproducer/templates/__init__.py +0 -0
  28. tritonparse/reproducer/templates/example.py +38 -0
  29. tritonparse/reproducer/templates/loader.py +59 -0
  30. tritonparse/reproducer/templates/tritonbench.py +106 -0
  31. tritonparse/reproducer/templates/utils.py +48 -0
  32. tritonparse/reproducer/tests/__init__.py +0 -0
  33. tritonparse/reproducer/tests/artifacts/__init__.py +5 -0
  34. tritonparse/reproducer/tests/artifacts/triton_fused_kernel.py +65 -0
  35. tritonparse/reproducer/tests/artifacts/triton_preprocess.py +16 -0
  36. tritonparse/reproducer/tests/artifacts/triton_utils.py +14 -0
  37. tritonparse/reproducer/tests/test_import_parser.py +164 -0
  38. tritonparse/reproducer/tests/test_import_resolver.py +88 -0
  39. tritonparse/reproducer/tests/test_multi_file_analyzer.py +118 -0
  40. tritonparse/reproducer/types.py +20 -0
  41. tritonparse/reproducer/utils.py +580 -0
  42. tritonparse/shared_vars.py +12 -0
  43. tritonparse/source_type.py +56 -0
  44. tritonparse/sourcemap_utils.py +96 -0
  45. tritonparse/structured_logging.py +1634 -0
  46. tritonparse/tools/__init__.py +0 -0
  47. tritonparse/tools/decompress_bin_ndjson.py +120 -0
  48. tritonparse/tools/disasm.py +81 -0
  49. tritonparse/tools/extract_irs.py +244 -0
  50. tritonparse/tools/format_fix.py +151 -0
  51. tritonparse/tools/load_tensor.py +76 -0
  52. tritonparse/tools/prettify_ndjson.py +334 -0
  53. tritonparse/tools/readme.md +37 -0
  54. tritonparse/tp_logger.py +9 -0
  55. tritonparse/trace_processor.py +367 -0
  56. tritonparse/utils.py +155 -0
  57. tritonparse-0.3.2.dev20251210071601.dist-info/METADATA +195 -0
  58. tritonparse-0.3.2.dev20251210071601.dist-info/RECORD +62 -0
  59. tritonparse-0.3.2.dev20251210071601.dist-info/WHEEL +5 -0
  60. tritonparse-0.3.2.dev20251210071601.dist-info/entry_points.txt +2 -0
  61. tritonparse-0.3.2.dev20251210071601.dist-info/licenses/LICENSE +29 -0
  62. tritonparse-0.3.2.dev20251210071601.dist-info/top_level.txt +1 -0
tritonparse/tools/prettify_ndjson.py
@@ -0,0 +1,334 @@
+#!/usr/bin/env python3
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+
+"""
+Convert an NDJSON file to a prettified JSON file.
+
+This script takes an NDJSON (newline-delimited JSON) file and converts it to a
+standard human-readable JSON file where each line becomes an element in a JSON array, with
+pretty formatting applied.
+
+Example:
+    Input NDJSON file (data.ndjson):
+        {"name": "Alice", "age": 30}
+        {"name": "Bob", "age": 25}
+        {"name": "Charlie", "age": 35}
+
+    Output JSON file (data_prettified.json):
+        [
+          {
+            "age": 30,
+            "name": "Alice"
+          },
+          {
+            "age": 25,
+            "name": "Bob"
+          },
+          {
+            "age": 35,
+            "name": "Charlie"
+          }
+        ]
+
+Usage:
+    python prettify_ndjson.py data.ndjson
+    python prettify_ndjson.py --lines 1,3 data.ndjson  # Only process lines 1 and 3
+    python prettify_ndjson.py --not-save-irs logs.ndjson  # Remove large fields for compilation events
+
+
+"""
+
+import argparse
+import gzip
+import json
+import sys
+from pathlib import Path
+from typing import Any, List, Union
+
+
+def _is_gzip_file(file_path: Path) -> bool:
+    """Check if file is gzip compressed (.gz or .bin.ndjson)."""
+    path_str = str(file_path)
+    return path_str.endswith(".gz") or path_str.endswith(".bin.ndjson")
+
+
+def parse_line_ranges(lines_arg: str) -> set[int]:
+    """
+    Parse line ranges from string like "1,2,3,5-10" into a set of line numbers.
+
+    Line numbers use 1-based indexing (first line is line 1, not 0).
+
+    Args:
+        lines_arg: String containing comma-separated line numbers and ranges
+            Examples: "1", "1,2,3", "5-10", "1,3,5-10,15"
+
+    Returns:
+        Set of line numbers (1-based indexing, where 1 = first line)
+
+    Raises:
+        ValueError: If the format is invalid or contains non-positive numbers
+    """
+    line_numbers = set()
+
+    if not lines_arg.strip():
+        return line_numbers
+
+    parts = lines_arg.split(",")
+    for part in parts:
+        part = part.strip()
+        if not part:
+            continue
+
+        if "-" in part:
+            # Handle range like "5-10"
+            try:
+                start, end = part.split("-", 1)
+                start_num = int(start.strip())
+                end_num = int(end.strip())
+                if start_num <= 0 or end_num <= 0:
+                    raise ValueError("Line numbers must be positive")
+                if start_num > end_num:
+                    raise ValueError(f"Invalid range: {part} (start > end)")
+                line_numbers.update(range(start_num, end_num + 1))
+            except ValueError as e:
+                if "invalid literal" in str(e):
+                    raise ValueError(f"Invalid range format: {part}")
+                raise
+        else:
+            # Handle single number like "1"
+            try:
+                line_num = int(part)
+                if line_num <= 0:
+                    raise ValueError("Line numbers must be positive")
+                line_numbers.add(line_num)
+            except ValueError:
+                raise ValueError(f"Invalid line number: {part}")
+
+    return line_numbers
+
+
+def load_ndjson(
+    file_path: Path, not_save_irs: bool = False, line_filter: set[int] | None = None
+) -> List[Any]:
+    """
+    Load NDJSON file and return list of JSON objects.
+
+    Supports uncompressed (.ndjson), gzip compressed (.ndjson.gz),
+    and gzip member concatenation (.bin.ndjson) formats.
+
+    Args:
+        file_path: Path to the NDJSON file
+        not_save_irs: Whether to NOT save file_content and python_source for compilation events
+        line_filter: Set of line numbers to include (1-based indexing), None means include all
+
+    Returns:
+        List of parsed JSON objects
+
+    Raises:
+        FileNotFoundError: If the input file doesn't exist
+        json.JSONDecodeError: If a line contains invalid JSON
+    """
+    json_objects = []
+    filtered_compilation_events = 0
+    total_lines_processed = 0
+
+    # Determine if file is gzip compressed
+    is_compressed = _is_gzip_file(file_path)
+    opener = gzip.open if is_compressed else open
+    mode = "rt" if is_compressed else "r"
+
+    try:
+        with opener(file_path, mode, encoding="utf-8") as f:
+            # enumerate(f, 1) starts line numbering from 1 (1-based indexing)
+            for line_num, line in enumerate(f, 1):
+                line = line.strip()
+                if not line:  # Skip empty lines
+                    continue
+
+                # Skip line if line filtering is enabled and this line is not in the filter
+                # line_num is 1-based (first line = 1, second line = 2, etc.)
+                if line_filter is not None and line_num not in line_filter:
+                    continue
+
+                total_lines_processed += 1
+
+                try:
+                    json_obj = json.loads(line)
+
+                    # Filter out file_content and python_source for compilation events if not_save_irs is True
+                    if not_save_irs and isinstance(json_obj, dict):
+                        event_type = json_obj.get("event_type")
+                        if event_type == "compilation":
+                            # Remove file_content and python_source from payload if they exist
+                            payload = json_obj.get("payload")
+                            if isinstance(payload, dict):
+                                fields_to_remove = []
+                                if "file_content" in payload:
+                                    fields_to_remove.append("file_content")
+                                if "python_source" in payload:
+                                    fields_to_remove.append("python_source")
+
+                                if fields_to_remove:
+                                    payload = (
+                                        payload.copy()
+                                    )  # Create a copy to avoid modifying original
+                                    for field in fields_to_remove:
+                                        del payload[field]
+                                    json_obj = (
+                                        json_obj.copy()
+                                    )  # Create a copy of the main object
+                                    json_obj["payload"] = payload
+                                    filtered_compilation_events += 1
+
+                    json_objects.append(json_obj)
+                except json.JSONDecodeError as e:
+                    print(
+                        f"Error parsing JSON on line {line_num}: {e}", file=sys.stderr
+                    )
+                    print(f"Problematic line: {line[:100]}...", file=sys.stderr)
+                    raise
+
+    except FileNotFoundError:
+        print(f"Error: File '{file_path}' not found.", file=sys.stderr)
+        raise
+    except (OSError, UnicodeDecodeError) as e:
+        print(f"Error reading file '{file_path}': {e}", file=sys.stderr)
+        raise
+
+    # Print informational messages
+    if line_filter is not None:
+        if line_filter:
+            print(
+                f"Line filtering: processed {total_lines_processed} out of {len(line_filter)} specified lines"
+            )
+        else:
+            print("Line filtering: no valid lines specified")
+
+    # Print warning if compilation events were filtered
+    if not_save_irs and filtered_compilation_events > 0:
+        print(
+            f"WARNING: Removed 'file_content' and 'python_source' fields from {filtered_compilation_events} compilation events to reduce file size.",
+            file=sys.stderr,
+        )
+        print(
+            "  Re-run without --not-save-irs flag to preserve these fields if needed.",
+            file=sys.stderr,
+        )
+
+    return json_objects
+
+
+def save_prettified_json(
+    json_objects: Union[List[Any], Any], output_path: Path
+) -> None:
+    """
+    Save JSON data to a prettified JSON file.
+
+    Args:
+        json_objects: Either a list of JSON objects or a single JSON-serializable object
+        output_path: Path where to save the prettified JSON file
+    """
+    try:
+        with open(output_path, "w", encoding="utf-8") as f:
+            json.dump(json_objects, f, indent=2, ensure_ascii=False, sort_keys=True)
+        print(f"Successfully converted to prettified JSON: {output_path}")
+    except OSError as e:
+        print(f"Error writing to file '{output_path}': {e}", file=sys.stderr)
+        raise
+
+
+def main():
+    """Main function to handle command line arguments and orchestrate the conversion."""
+    parser = argparse.ArgumentParser(
+        description="Convert NDJSON file to prettified JSON file",
+        formatter_class=argparse.RawDescriptionHelpFormatter,
+        epilog="""
+Examples:
+    python prettify_ndjson.py data.ndjson
+    python prettify_ndjson.py /path/to/logs.ndjson
+        """,
+    )
+
+    parser.add_argument(
+        "ndjson_file", type=str, help="Path to the NDJSON file to convert"
+    )
+
+    parser.add_argument(
+        "--not-save-irs",
+        action="store_true",
+        default=False,
+        help="Do not save file_content and python_source for compilation events (default: False, removes these fields to reduce size)",
+    )
+
+    parser.add_argument(
+        "--lines",
+        type=str,
+        help="Specify line numbers to include using 1-based indexing (e.g., '1,2,3,5-10'). "
+        "Line 1 is the first line of the file. Only these lines from the original NDJSON will be processed. "
+        "Supports individual lines (1,2,3) and ranges (5-10).",
+    )
+
+    parser.add_argument(
+        "-o",
+        "--output",
+        type=str,
+        help="Specify output file path (default: {input_stem}_prettified.json in the same directory as input)",
+    )
+
+    args = parser.parse_args()
+
+    # Convert to Path object and validate
+    input_path = Path(args.ndjson_file)
+
+    if not input_path.exists():
+        print(f"Error: File '{input_path}' does not exist.", file=sys.stderr)
+        sys.exit(1)
+
+    if not input_path.is_file():
+        print(f"Error: '{input_path}' is not a file.", file=sys.stderr)
+        sys.exit(1)
+
+    # Generate output filename
+    if args.output:
+        output_path = Path(args.output)
+    else:
+        # Default: original_prettified.json in same directory as input
+        output_path = input_path.parent / f"{input_path.stem}_prettified.json"
+
+    try:
+        # Parse line filter if provided
+        line_filter = None
+        if args.lines:
+            try:
+                line_filter = parse_line_ranges(args.lines)
+                print(
+                    f"Line filtering enabled: will process {len(line_filter)} specified lines"
+                )
+            except ValueError as e:
+                print(f"Error parsing --lines argument: {e}", file=sys.stderr)
+                sys.exit(1)
+
+        # Load NDJSON file
+        print(f"Loading NDJSON file: {input_path}")
+        if args.not_save_irs:
+            print(
+                "Filtering out file_content and python_source from compilation events to reduce size"
+            )
+        json_objects = load_ndjson(
+            input_path, not_save_irs=args.not_save_irs, line_filter=line_filter
+        )
+        print(f"Loaded {len(json_objects)} JSON objects")
+
+        # Save as prettified JSON
+        print(f"Saving prettified JSON to: {output_path}")
+        save_prettified_json(json_objects, output_path)
+
+        print("Conversion completed successfully!")
+
+    except Exception as e:
+        print(f"Conversion failed: {e}", file=sys.stderr)
+        sys.exit(1)
+
+
+if __name__ == "__main__":
+    main()
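Besides its CLI, the new tool exposes three helpers (`parse_line_ranges`, `load_ndjson`, `save_prettified_json`). Below is a minimal sketch of driving them directly; it assumes the wheel is installed so that `tritonparse.tools.prettify_ndjson` is importable, and the sample NDJSON file is hypothetical (mirroring the module docstring's example).

```python
# Minimal sketch (hypothetical sample data): call the helpers from the new tool
# directly instead of via its command-line interface.
from pathlib import Path

from tritonparse.tools.prettify_ndjson import (
    load_ndjson,
    parse_line_ranges,
    save_prettified_json,
)

# "1,3,5-7" expands to the 1-based line numbers 1, 3, 5, 6 and 7
print(sorted(parse_line_ranges("1,3,5-7")))  # [1, 3, 5, 6, 7]

# Hypothetical three-line NDJSON input
sample = Path("data.ndjson")
sample.write_text(
    '{"name": "Alice", "age": 30}\n'
    '{"name": "Bob", "age": 25}\n'
    '{"name": "Charlie", "age": 35}\n',
    encoding="utf-8",
)

# Keep only lines 1 and 3; not_save_irs=True would additionally strip
# file_content / python_source from compilation events' payloads.
objects = load_ndjson(sample, not_save_irs=False, line_filter={1, 3})
save_prettified_json(objects, Path("data_prettified.json"))
```

The same result is available from the command line via `python prettify_ndjson.py --lines 1,3 data.ndjson`, as the module docstring shows.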
tritonparse/tools/readme.md
@@ -0,0 +1,37 @@
+The tool scripts in this folder are used separately. They are not part of the main tritonparse functionality.
+
+## Available Tools
+
+### extract_irs.py
+Extract IR (Intermediate Representation) files from NDJSON trace logs.
+
+**Usage:**
+```bash
+python extract_irs.py -i <input.ndjson> --line <line_number> -o <output_folder>
+```
+
+**Arguments:**
+- `-i, --input`: Path to the input NDJSON file
+- `--line`: Line number to extract (0-based indexing, where 0 = first line)
+- `-o, --output`: Output directory to save extracted IR files
+- `--kernel-name`: (Optional) Custom kernel name for output files
+
+**Examples:**
+```bash
+# Extract IRs from the first line (line 0)
+python extract_irs.py -i logs.ndjson --line 0 -o output_folder
+
+# Extract from line 5
+python extract_irs.py -i logs.ndjson --line 5 -o ./irs
+
+# Specify custom kernel name
+python extract_irs.py -i logs.ndjson --line 0 -o ./irs --kernel-name my_kernel
+```
+
+**Extracted Files:**
+- `*.ttir` - Triton IR
+- `*.ttgir` - Triton GPU IR
+- `*.llir` - LLVM IR
+- `*.ptx` - PTX assembly
+- `*.json` - Kernel metadata
+- `*_source.py` - Python source code (if available)
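The readme leaves picking the `--line` value to the user. As a hedged sketch (not part of the packaged readme), compilation events in an uncompressed trace can be located by scanning for `event_type == "compilation"`, the same field prettify_ndjson.py filters on above; the 0-based index of each matching line is what `--line` expects here.

```python
# Hedged sketch (not from the package): list candidate --line values for
# extract_irs.py by finding compilation events in an uncompressed NDJSON trace.
# Assumes each event carries an "event_type" field, as used by prettify_ndjson.py.
import json

with open("logs.ndjson", "r", encoding="utf-8") as f:
    for line_no, raw in enumerate(f):  # extract_irs.py uses 0-based line numbers
        raw = raw.strip()
        if not raw:
            continue
        event = json.loads(raw)
        if isinstance(event, dict) and event.get("event_type") == "compilation":
            print(f"--line {line_no}")
```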
tritonparse/tp_logger.py
@@ -0,0 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+
+import logging
+
+logger = logging.getLogger("tritonparse")
+# Configure logging
+logging.basicConfig(
+    level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
+)
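For context, a minimal sketch of how this module-level logger would be consumed elsewhere in the package; the call sites are illustrative and not taken from the wheel.

```python
# Illustrative only: reuse the "tritonparse" logger configured in tp_logger.py.
from tritonparse.tp_logger import logger

logger.info("parsing NDJSON trace")  # emitted, since basicConfig sets level=INFO
logger.debug("hidden unless the level is lowered to DEBUG")
```

Because `basicConfig` runs at import time, importing `tritonparse.tp_logger` anywhere in a process installs the handler format shown above.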