tritonparse 0.3.1.dev20251026071454__py3-none-any.whl → 0.3.1.dev20251028071524__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of tritonparse might be problematic.
tritonparse/cli.py CHANGED
@@ -70,7 +70,7 @@ def main():
     elif args.func == "reproduce":
         reproduce(
             input_path=args.input,
-            line_index=args.line,
+            line_index=args.line - 1,  # Convert 1-based line number to 0-based index
             out_dir=args.out_dir,
             template=args.template,
             kernel_import=args.kernel_import,
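The CLI now converts the user-facing 1-based `--line` value into the 0-based `line_index` the library expects. A minimal sketch of that convention (the helper below is illustrative, not part of tritonparse):

```python
# Hypothetical helper mirroring the CLI behaviour: --line is 1-based on the
# command line, while reproduce() takes a 0-based index into the events list.
def to_zero_based(line: int) -> int:
    if line < 1:
        raise ValueError("--line is 1-based; the first launch event is --line 1")
    return line - 1

assert to_zero_based(1) == 0  # `--line 1` selects events[0]
assert to_zero_based(5) == 4
```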
@@ -42,7 +42,7 @@ def process_amd_gcn_bufferops(
 ) -> dict[str, int]:
     ir_content = load_ir_contents(key, file_content, file_path)
     # TODO: Add atomics
-    io_keys = ["global_load_", "global_store_", "buffer_load_", "buffer_store_"]
+    io_keys = ["global_load", "global_store", "buffer_load", "buffer_store"]
     return process_amd_bufferop(ir_content, io_keys)


@@ -66,9 +66,12 @@ def _generate_ir_analysis(entry: str):
     gcn_bufferops_info = process_amd_gcn_bufferops(
         amdgcn_key, file_content, file_path
     )
+    io_counts = {}
     # NDJSON format requires a newline at the end of each line
     if ttgir_bufferops_info:
-        ir_analysis["amd_ttgir_bufferops_count"] = ttgir_bufferops_info
+        io_counts["amd_ttgir_bufferops_count"] = ttgir_bufferops_info
     if gcn_bufferops_info:
-        ir_analysis["amd_gcn_bufferops_count"] = gcn_bufferops_info
-    return {"ir_analysis": ir_analysis}
+        io_counts["amd_gcn_bufferops_count"] = gcn_bufferops_info
+    if io_counts:
+        ir_analysis["io_counts"] = io_counts
+    return ir_analysis
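The return shape changes here: counts are now grouped under an `io_counts` key inside the analysis dict, and wrapping into the event envelope moves to the caller (see the `parse_single_file` hunk below). A hedged before/after sketch with invented counts:

```python
# Illustrative only: `counts` stands in for the dict[str, int] returned by
# process_amd_bufferop, e.g. {"buffer_load": 4, "buffer_store": 2}.
counts = {"buffer_load": 4, "buffer_store": 2}

old_return = {"ir_analysis": {"amd_gcn_bufferops_count": counts}}  # 0.3.1.dev20251026071454
new_return = {"io_counts": {"amd_gcn_bufferops_count": counts}}    # 0.3.1.dev20251028071524
```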
tritonparse/ir_parser.py CHANGED
@@ -10,7 +10,8 @@ logger = logging.getLogger("SourceMapping")

 # the definition of the #loc directive. they are in the bottom of the IR files
 # Example:#loc2 = loc("/tmp/torchinductor_yhao/yp/abcdef.py":20:28)
-LOC_PATTERN = re.compile(r'#loc(\d*) = loc\("([^"]+)":(\d+):(\d+)\)')
+# Note: This should only match numbered locs like #loc1, #loc2, not bare #loc
+LOC_PATTERN = re.compile(r'#loc(\d+) = loc\("([^"]+)":(\d+):(\d+)\)')

 # the reference to the #loc directive. they are in the end of lines of the IR files
 # Example: loc(#loc2)
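The tightened quantifier matters because `\d*` also matches the bare `#loc` directive (capturing an empty id), which is handled separately. A quick demonstration (file path invented):

```python
import re

ir_tail = '#loc = loc("/tmp/kernel.py":10:4)\n#loc1 = loc("/tmp/kernel.py":12:8)'

old_pattern = re.compile(r'#loc(\d*) = loc\("([^"]+)":(\d+):(\d+)\)')
new_pattern = re.compile(r'#loc(\d+) = loc\("([^"]+)":(\d+):(\d+)\)')

print([m[0] for m in old_pattern.findall(ir_tail)])  # ['', '1']  bare #loc matched too
print([m[0] for m in new_pattern.findall(ir_tail)])  # ['1']      only numbered locs
```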
@@ -33,6 +34,17 @@ AMDGCN_LOC_PATTERN = re.compile(
 )


+# alias loc definitions in TTGIR/TTIR
+# Example: #loc16 = loc("pid"(#loc2))
+# Example: #loc13 = loc("x_ptr"(#loc)) - bare #loc without number
+ALIAS_WITH_NAME_PATTERN = re.compile(
+    r'#loc(\d+)\s*=\s*loc\("([^"]+)"\s*\(\s*#loc(\d*)\s*\)\s*\)'
+)
+
+# Example: #loc20 = loc(#loc16)
+ALIAS_SIMPLE_PATTERN = re.compile(r"#loc(\d+)\s*=\s*loc\(\s*#loc(\d*)\s*\)")
+
+
 def extract_loc_definitions(ir_content: str) -> Dict[str, Dict[str, Any]]:
     """
     Extracts location definitions from the given IR content.
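To see what the two new patterns capture, here is a small check against the example lines quoted in the comments above:

```python
import re

ALIAS_WITH_NAME_PATTERN = re.compile(
    r'#loc(\d+)\s*=\s*loc\("([^"]+)"\s*\(\s*#loc(\d*)\s*\)\s*\)'
)
ALIAS_SIMPLE_PATTERN = re.compile(r"#loc(\d+)\s*=\s*loc\(\s*#loc(\d*)\s*\)")

print(ALIAS_WITH_NAME_PATTERN.match('#loc16 = loc("pid"(#loc2))').groups())   # ('16', 'pid', '2')
print(ALIAS_WITH_NAME_PATTERN.match('#loc13 = loc("x_ptr"(#loc))').groups())  # ('13', 'x_ptr', '')
print(ALIAS_SIMPLE_PATTERN.match("#loc20 = loc(#loc16)").groups())            # ('20', '16')
```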
@@ -50,9 +62,10 @@ def extract_loc_definitions(ir_content: str) -> Dict[str, Dict[str, Any]]:
     """
     locations = {}
     # The first #loc directive is a special case. It locates at the top of the IR files
+    # Store it with empty string "" as key to avoid conflict with #loc1
     main_match = re.search(r'#loc = loc\("([^"]+)":(\d+):(\d+)\)', ir_content)
     if main_match:
-        locations["1"] = {
+        locations[""] = {
             "file": main_match.group(1),
             "line": int(main_match.group(2)),
             "column": int(main_match.group(3)),
@@ -61,6 +74,84 @@ def extract_loc_definitions(ir_content: str) -> Dict[str, Dict[str, Any]]:
     for loc_id, filename, line, col in LOC_PATTERN.findall(ir_content):
         key = loc_id
         locations[key] = {"file": filename, "line": int(line), "column": int(col)}
+
+    # Handle alias-style loc definitions that reference another #loc
+    # Build alias map first: alias_id -> target_id
+    alias_map: Dict[str, str] = {}
+    for m in ALIAS_WITH_NAME_PATTERN.finditer(ir_content):
+        alias_id, _name, target_id = m.groups()
+        # Empty target_id means bare #loc, map to "" (main loc key)
+        alias_map[alias_id] = target_id or ""
+    for m in ALIAS_SIMPLE_PATTERN.finditer(ir_content):
+        alias_id, target_id = m.groups()
+        # Empty target_id means bare #loc, map to "" (main loc key)
+        alias_map[alias_id] = target_id or ""
+
+    # Build definition line map and alias name map by scanning lines
+    def_line_map: Dict[str, int] = {}
+    alias_name_map: Dict[str, str] = {}
+    main_loc_line: int = 0
+    for i, line in enumerate(ir_content.split("\n"), start=1):
+        if m := ALIAS_WITH_NAME_PATTERN.search(line):
+            alias_id, name, target_id = m.groups()
+            def_line_map[alias_id] = i
+            alias_name_map[alias_id] = name
+            # ensure alias map is populated even if only found in line scan
+            # Empty target_id means bare #loc, map to "" (main loc key)
+            alias_map.setdefault(alias_id, target_id or "")
+        elif m := ALIAS_SIMPLE_PATTERN.search(line):
+            alias_id, target_id = m.groups()
+            def_line_map[alias_id] = i
+            # Empty target_id means bare #loc, map to "" (main loc key)
+            alias_map.setdefault(alias_id, target_id or "")
+        if m2 := LOC_PATTERN.search(line):
+            base_id, _fn, _ln, _col = m2.groups()
+            def_line_map[base_id] = i
+        if re.search(r'#loc\s*=\s*loc\("[^"]+":\d+:\d+\)', line):
+            # main #loc = loc("file":line:col) without id
+            main_loc_line = main_loc_line or i
+
+    # Resolve aliases to base locations (file/line/column)
+    resolving_stack = set()
+
+    def resolve_alias(current_id: str) -> Dict[str, Any]:
+        # Already a concrete location
+        if current_id in locations:
+            return locations[current_id]
+        # Detect cycles
+        if current_id in resolving_stack:
+            return {}
+        resolving_stack.add(current_id)
+        parent_id = alias_map.get(current_id)
+        result: Dict[str, Any] = {}
+        if parent_id is not None:
+            base = resolve_alias(parent_id)
+            if base:
+                # copy to avoid sharing the same dict by reference
+                result = {
+                    "file": base.get("file"),
+                    "line": base.get("line"),
+                    "column": base.get("column"),
+                }
+        locations[current_id] = result
+        resolving_stack.remove(current_id)
+        return result
+
+    # Resolve aliases and attach alias metadata
+    for alias_id, target_id in alias_map.items():
+        if alias_id not in locations:
+            resolve_alias(alias_id)
+        if alias_id in locations:
+            locations[alias_id]["alias_of"] = target_id
+            if alias_id in alias_name_map:
+                locations[alias_id]["alias_name"] = alias_name_map[alias_id]
+
+    # Attach definition line metadata
+    for k, v in def_line_map.items():
+        if k in locations:
+            locations[k]["def_line"] = v
+    if main_loc_line and "" in locations:
+        locations[""]["def_line"] = main_loc_line
     return locations


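Putting the pieces together, on a toy IR tail the extended function should resolve aliases back to the concrete file/line/column of their targets and record where each `#loc` is defined. A hedged sketch of the expected result (path and numbers invented):

```python
ir_tail = """#loc = loc("/tmp/kernel.py":10:0)
#loc2 = loc("/tmp/kernel.py":12:24)
#loc16 = loc("pid"(#loc2))
#loc20 = loc(#loc16)"""

# extract_loc_definitions(ir_tail) should now return roughly:
expected = {
    "":   {"file": "/tmp/kernel.py", "line": 10, "column": 0,  "def_line": 1},
    "2":  {"file": "/tmp/kernel.py", "line": 12, "column": 24, "def_line": 2},
    "16": {"file": "/tmp/kernel.py", "line": 12, "column": 24,
           "alias_of": "2", "alias_name": "pid", "def_line": 3},
    "20": {"file": "/tmp/kernel.py", "line": 12, "column": 24,
           "alias_of": "16", "def_line": 4},
}
```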
@@ -40,7 +40,7 @@ def get_launch_and_compilation_events(

     Args:
         events: List of parsed event dictionaries.
-        line_index: Index of the launch event to process.
+        line_index: 0-based index of the launch event to process.

     Returns:
         Tuple of (launch_event, compilation_event).
@@ -179,7 +179,7 @@ def build_context_bundle(

     Args:
         events: List of parsed event dictionaries.
-        line_index: Index of the launch event to process.
+        line_index: 0-based index of the launch event to process.

     Returns:
         ContextBundle containing all information needed to reproduce the kernel launch.
@@ -29,7 +29,7 @@ def reproduce(

    Args:
        input_path: Path to the NDJSON trace file.
-       line_index: Line index of the launch event to reproduce.
+       line_index: 0-based index of the launch event to reproduce in the events list.
        out_dir: Output directory for reproducer files.
        template: Template name to use for the reproducer.
        replacer: Optional custom PlaceholderReplacer instance. If None, uses DefaultPlaceholderReplacer.
@@ -71,12 +71,38 @@ def generate_source_mappings(
             }
         elif loc_id in loc_defs:
             info = loc_defs[loc_id]
-            mappings[str(ln)] = {
+            entry = {
                 "file": info["file"],
                 "line": info["line"],
                 "column": info["column"],
                 f"{ir_type}_line": ln,
             }
+            # Propagate alias metadata if present
+            if "alias_name" in info:
+                entry["alias_name"] = info["alias_name"]
+            if "alias_of" in info:
+                entry["loc_id"] = loc_id
+            mappings[str(ln)] = entry
+
+    # Add separate entries for loc definition lines
+    for loc_id, info in loc_defs.items():
+        if "def_line" not in info:
+            continue
+        def_ln = info["def_line"]
+        # Only create mapping if this line doesn't already have one
+        if str(def_ln) not in mappings:
+            entry = {
+                "file": info["file"],
+                "line": info["line"],
+                "column": info["column"],
+                f"{ir_type}_line": def_ln,
+                "kind": "loc_def",
+            }
+            if "alias_name" in info:
+                entry["alias_name"] = info["alias_name"]
+            if "alias_of" in info:
+                entry["loc_id"] = loc_id
+            mappings[str(def_ln)] = entry

     return mappings

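A hedged sketch of what a mapping entry for a loc-definition line might look like after this change (values invented, `ir_type` assumed to be "ttgir"):

```python
# Entry keyed by the IR line that holds '#loc16 = loc("pid"(#loc2))',
# assuming #loc16 resolves to kernel.py:12:24 and sits on TTGIR line 153.
mappings = {
    "153": {
        "file": "/tmp/kernel.py",
        "line": 12,
        "column": 24,
        "ttgir_line": 153,
        "kind": "loc_def",   # marks a #loc definition line, not a code line
        "alias_name": "pid",
        "loc_id": "16",
    }
}
```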
@@ -301,8 +327,13 @@ def parse_single_file(
         )

         if compilation_event:
-            ir_analysis_event = _generate_ir_analysis(compilation_event)
-            if ir_analysis_event:
+            ir_analysis = _generate_ir_analysis(compilation_event)
+            if ir_analysis:
+                ir_analysis_event = {
+                    "event_type": "ir_analysis",
+                    "hash": _kernel_hash,
+                    "ir_analysis": ir_analysis,
+                }
                 all_output_lines[output_file].append(
                     json.dumps(ir_analysis_event, separators=(",", ":")) + "\n"
                 )
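With the new envelope, the NDJSON line appended for a kernel should look roughly like the following (hash and counts invented):

```python
import json

ir_analysis_event = {
    "event_type": "ir_analysis",
    "hash": "PLACEHOLDER_KERNEL_HASH",  # stands in for _kernel_hash
    "ir_analysis": {
        "io_counts": {
            "amd_gcn_bufferops_count": {"buffer_load": 4, "buffer_store": 2},
        }
    },
}
print(json.dumps(ir_analysis_event, separators=(",", ":")))
```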
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: tritonparse
-Version: 0.3.1.dev20251026071454
+Version: 0.3.1.dev20251028071524
 Summary: TritonParse: A Compiler Tracer, Visualizer, and mini-Reproducer Generator for Triton Kernels
 Author-email: Yueming Hao <yhao@meta.com>
 License-Expression: BSD-3-Clause
@@ -134,7 +134,7 @@ from tritonparse.reproducer.orchestrator import reproduce

 result = reproduce(
     input_path="./parsed_output/trace.ndjson.gz",
-    line_index=1,  # Which launch event (1-based)
+    line_index=0,  # 0-based index (first event is 0)
     out_dir="repro_output"
 )
 ```
@@ -1,28 +1,28 @@
 tritonparse/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tritonparse/__main__.py,sha256=RXbkALBewcb1xlJBnsQl9IaBRUNln7U8NuRZKT8UdIk,117
-tritonparse/cli.py,sha256=w_sCIv1NlcGDzoflsYuTvtQis92_YuZEC51ZlTVsYCU,2517
+tritonparse/cli.py,sha256=Z3nz_rGYXF6NKmI3LsTomWqM51sJ8Tp-ybxRq7l20BI,2569
 tritonparse/common.py,sha256=MJo9bVCgSKkwXpEoUkUczPo_5jOYpJgXLq4UsWYqN3c,13924
 tritonparse/context_manager.py,sha256=OdMn11qbApYL2c9IlbUpcT27r04ZSa4DfvrY2mLA958,2243
 tritonparse/event_diff.py,sha256=USCjfjYr-7Ie-EfZgtCFMZMA1KRzFRDe7yDFy98zYI4,4962
 tritonparse/extract_source_mappings.py,sha256=Z6UxFj2cCE5NCWLQTYPKqUpLfbYhqP8xgCl5mvud9KI,1451
-tritonparse/ir_analysis.py,sha256=YObFsRKkpdjkwL3lIeWRXU6DY13fGVqdh8li4PiCpoQ,2482
-tritonparse/ir_parser.py,sha256=Qtb8kA6IMBtbLbABEGSDFdGQQI_GsPd1iC2GHbEviaI,9379
+tritonparse/ir_analysis.py,sha256=MoOXuHsUGZ705R4JnXmlsrBn9gJdLO1Dnf0L5AxcaBM,2551
+tritonparse/ir_parser.py,sha256=MH4RwoNZMBdWUxkFyEhemJ7Aa7-asoba66b06bGPNsk,13237
 tritonparse/mapper.py,sha256=QBCUMHM9pu3x3ahFp0wyXJbmv9TFGVPdkcLULok1E-k,4205
 tritonparse/shared_vars.py,sha256=RifXq55KisHgspYAmGcaCWY6ZHX8iejFHvwIewvcWZE,707
 tritonparse/source_type.py,sha256=nmYEQS8rfkIN9BhNhQbkmEvKnvS-3zAxRGLY4TaZdi8,1676
 tritonparse/sourcemap_utils.py,sha256=uI02n5Sgnlx7Nc15QAX5N6_tZZMips0PyJuo1n3eouY,2654
 tritonparse/structured_logging.py,sha256=L1xkkCx8Jr9YQbM0Kgtf2g6L3aWMkYOEeFFEOSo8Lkk,60306
 tritonparse/tp_logger.py,sha256=vXzY7hMDmVnRBGBhIjFZe3nHZzG5NKKPONGUszJhGgU,242
-tritonparse/trace_processor.py,sha256=cJHagBOSHFpvUKYLxrvtgoFODDZ5CEFkU_xZTKsD9s8,12981
+tritonparse/trace_processor.py,sha256=aQPqlnpTtWoGzHYv4BXWUH4nCeUQGSK3o-fj0LD9I0c,14147
 tritonparse/utils.py,sha256=Jnlptcd79llSDev-_1XyyOnv2izUqv0PEL74A8GF2tc,4565
 tritonparse/reproducer/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tritonparse/reproducer/cli.py,sha256=wk0K8qJhvP9gty2EBMH3WEc3TSFcszvNq3JbfYu_sTw,1577
 tritonparse/reproducer/function_extractor.py,sha256=kQr10JKHy8EvAN7ic4Azjz6TYe-udBW2DVmbQ--c1pc,6643
-tritonparse/reproducer/orchestrator.py,sha256=h55xXEVYtIr33GvwCuvM03bsYc01_yPa5X_B-3w71kE,3237
+tritonparse/reproducer/orchestrator.py,sha256=GotBOJjrShN1oCFc_xTMXn8WWT1Jlfap5qcM21dKBpM,3259
 tritonparse/reproducer/placeholder_replacer.py,sha256=ARPZAa9A3Fyit_dIclOKe1JzFgUPBFdHvfy3z20x2E8,9607
 tritonparse/reproducer/types.py,sha256=86wql3NaGgpkOzx0gDFb5qexNjKExzhL0uIwGU7grrw,564
 tritonparse/reproducer/utils.py,sha256=yFS1Mg2IhRgW-1UNfqjWH5gRSqc8Wbn5Ykre8L-EWcU,16599
-tritonparse/reproducer/ingestion/ndjson.py,sha256=QX_4XyK_vhSOFyJtfDxQNwzXqVXolrfk4cefzlShFps,7427
+tritonparse/reproducer/ingestion/ndjson.py,sha256=7amSwpbtG-od1-pW18Nm9AiaFc3Etd0-UETXwiYCmgw,7443
 tritonparse/reproducer/templates/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tritonparse/reproducer/templates/example.py,sha256=jR3c8_d7fAFJYaj1DuUuthnI4Xd-_606bWDRdUPMNyo,785
 tritonparse/reproducer/templates/loader.py,sha256=x14KHXkovOIcXFKii3Jx4XjpEhXqUMqp575qAffi370,1975
@@ -33,9 +33,9 @@ tritonparse/tools/format_fix.py,sha256=ISalg_N_L7Xktag3mLr-G9T6Opxv793s1WG6A9wUt
 tritonparse/tools/load_tensor.py,sha256=7-LbpboKDNJFBLNhiKS3enoqRlVAb55OjPc70PwHXAw,2789
 tritonparse/tools/prettify_ndjson.py,sha256=kR8hmBCv-iJeuzpi2_6CZv9T4_edRQbBOSOPpMm6wrw,11117
 tritonparse/tools/readme.md,sha256=w6PWYfYnRgoPArLjxG9rVrpcLUkoVMGuRlbpF-o0IQM,110
-tritonparse-0.3.1.dev20251026071454.dist-info/licenses/LICENSE,sha256=4ZciugpyN7wcM4L-9pyDh_etvMUeIfBhDTyH1zeZlQM,1515
-tritonparse-0.3.1.dev20251026071454.dist-info/METADATA,sha256=R9VU8RVaqA-dbT5qelFdNzsWoKrYaYEjho_6L8oI8NU,8278
-tritonparse-0.3.1.dev20251026071454.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-tritonparse-0.3.1.dev20251026071454.dist-info/entry_points.txt,sha256=wEXdaieDoRRCCdhEv2p_C68iytnaXU_2pwt5CqjfbWY,56
-tritonparse-0.3.1.dev20251026071454.dist-info/top_level.txt,sha256=ITcTKgp3vf_bXV9vixuQU9IrZa3L1EfDSZwvRzRaoJU,12
-tritonparse-0.3.1.dev20251026071454.dist-info/RECORD,,
+tritonparse-0.3.1.dev20251028071524.dist-info/licenses/LICENSE,sha256=4ZciugpyN7wcM4L-9pyDh_etvMUeIfBhDTyH1zeZlQM,1515
+tritonparse-0.3.1.dev20251028071524.dist-info/METADATA,sha256=6PkpYMi1Qjf4Lar46WHsMPBo6dQts6i6n3IcwmijYeg,8282
+tritonparse-0.3.1.dev20251028071524.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+tritonparse-0.3.1.dev20251028071524.dist-info/entry_points.txt,sha256=wEXdaieDoRRCCdhEv2p_C68iytnaXU_2pwt5CqjfbWY,56
+tritonparse-0.3.1.dev20251028071524.dist-info/top_level.txt,sha256=ITcTKgp3vf_bXV9vixuQU9IrZa3L1EfDSZwvRzRaoJU,12
+tritonparse-0.3.1.dev20251028071524.dist-info/RECORD,,