tritonparse 0.2.4.dev20251003071457__py3-none-any.whl → 0.2.4.dev20251005071455__py3-none-any.whl

This diff shows the changes between two publicly available versions of the package as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release.

This version of tritonparse might be problematic.

tritonparse/common.py CHANGED
@@ -262,6 +262,7 @@ def parse_logs(
     rank_config: RankConfig,
     verbose: bool = False,
     tritonparse_url_prefix: str = "",
+    split_inductor_compilations: bool = True,
 ) -> Tuple[str, dict]:
     """
     Parse logs.
@@ -271,7 +272,9 @@ def parse_logs(
         rank_config: Rank configuration
         verbose: Whether to print verbose information
         tritonparse_url_prefix: URL prefix for the generated file mapping
-
+        split_inductor_compilations: Whether to split
+            output files by frame_id, compile_id, attempt_id, and compiled_autograd_id.
+            Defaults to True. This rule follows tlparse's behavior.
     Returns:
         Tuple of (parsed log directory, file mapping)
     """
@@ -327,7 +330,7 @@ def parse_logs(
         relative_path = rank.to_string("")
         output_dir = os.path.join(parsed_log_dir, relative_path)
         # Parse the file
-        parse_single_file(input_file, output_dir)
+        parse_single_file(input_file, output_dir, split_inductor_compilations)
         # Collect generated files after parsing and gzip them immediately
         if os.path.exists(output_dir):
            generated_files = []
tritonparse/context_manager.py CHANGED
@@ -2,8 +2,10 @@ import os
 import shutil
 import tempfile
 
-from .structured_logging import clear_logging_config, init
-from .utils import unified_parse
+from tritonparse.shared_vars import TEST_KEEP_OUTPUT
+
+from tritonparse.structured_logging import clear_logging_config, init
+from tritonparse.utils import unified_parse
 
 
 def createUniqueTempDirectory():
@@ -11,13 +13,38 @@ def createUniqueTempDirectory():
 
 
 class TritonParseManager:
+    def __init__(
+        self,
+        enable_trace_launch=False,
+        split_inductor_compilations=True,
+        **parse_kwargs,
+    ):
+        """
+        Context manager for tritonparse workflow.
+
+        Args:
+            enable_trace_launch: Whether to enable trace launch
+            split_inductor_compilations: Whether to split inductor compilations in the output
+            **parse_kwargs: Additional keyword arguments to pass to unified_parse
+        """
+        self.enable_trace_launch = enable_trace_launch
+        self.split_inductor_compilations = split_inductor_compilations
+        self.parse_kwargs = parse_kwargs
+        self.dir_path = None
+        self.output_link = None
+
     def __enter__(self):
         self.dir_path = createUniqueTempDirectory()
-        init(self.dir_path)
+        init(self.dir_path, enable_trace_launch=self.enable_trace_launch)
         return self
 
     def __exit__(self, exc_type, exc_val, exc_tb):
-        self.output_link = unified_parse(source=self.dir_path, overwrite=True)
+        self.output_link = unified_parse(
+            source=self.dir_path,
+            overwrite=True,
+            split_inductor_compilations=self.split_inductor_compilations,
+            **self.parse_kwargs,
+        )
         clear_logging_config()
-        if os.path.exists(self.dir_path):
+        if os.path.exists(self.dir_path) and not TEST_KEEP_OUTPUT:
             shutil.rmtree(self.dir_path)
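
For orientation, a minimal usage sketch of the updated context manager; the workload inside the with block and the CUDA device are illustrative assumptions, not part of the package:

import torch

from tritonparse.context_manager import TritonParseManager

# enable_trace_launch and split_inductor_compilations are the knobs added in this
# release; any extra keyword arguments are forwarded to unified_parse on exit.
with TritonParseManager(
    enable_trace_launch=True,
    split_inductor_compilations=False,
) as manager:
    fn = torch.compile(lambda x: torch.relu(x) + 1)  # illustrative workload
    fn(torch.randn(1024, device="cuda"))             # assumes a CUDA device is available

# __exit__ has already parsed the traces; the result location is kept on the manager.
print(manager.output_link)
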
tritonparse/reproducer/orchestrator.py CHANGED
@@ -1,13 +1,13 @@
 from pathlib import Path
+from typing import Optional
 
 from tritonparse.reproducer.ingestion.ndjson import build_context_bundle
-from tritonparse.reproducer.templates.loader import load_template_code
-from tritonparse.reproducer.utils import (
-    _generate_import_statements,
-    _generate_invocation_snippet,
-    _parse_kernel_signature,
-    determine_output_paths,
+from tritonparse.reproducer.placeholder_replacer import (
+    DefaultPlaceholderReplacer,
+    PlaceholderReplacer,
 )
+from tritonparse.reproducer.templates.loader import load_template_code
+from tritonparse.reproducer.utils import determine_output_paths
 
 from tritonparse.tools.prettify_ndjson import load_ndjson, save_prettified_json
 from tritonparse.tp_logger import logger
@@ -18,6 +18,7 @@ def reproduce(
     line_index: int,
     out_dir: str,
     template: str,
+    replacer: Optional[PlaceholderReplacer] = None,
 ) -> dict[str, Path]:
     """
     Generate a reproducer script from NDJSON trace file.
@@ -26,6 +27,8 @@ def reproduce(
         input_path: Path to the NDJSON trace file.
         line_index: Line index of the launch event to reproduce.
         out_dir: Output directory for reproducer files.
+        template: Template name to use for the reproducer.
+        replacer: Optional custom PlaceholderReplacer instance. If None, uses DefaultPlaceholderReplacer.
     """
     logger.debug(f"Building bundle from {input_path} at line {line_index}")
     events = load_ndjson(Path(input_path))
@@ -42,18 +45,15 @@ def reproduce(
     save_prettified_json(context_bundle.raw_launch_event, temp_json_path)
     logger.debug("Loading reproducer template.")
     template_code = load_template_code(template)
-    final_code = template_code.replace(
-        "{{JSON_FILE_NAME_PLACEHOLDER}}", temp_json_path.name
-    )
-    sys_stmt, import_statement = _generate_import_statements(context_bundle.kernel_info)
-    final_code = final_code.replace("# {{KERNEL_SYSPATH_PLACEHOLDER}}", sys_stmt)
-    final_code = final_code.replace("# {{KERNEL_IMPORT_PLACEHOLDER}}", import_statement)
-    source_code = context_bundle.kernel_info.source_code
-    pos_args, kw_args = _parse_kernel_signature(source_code)
-    invocation_snippet = _generate_invocation_snippet(pos_args, kw_args)
-    final_code = final_code.replace(
-        "# {{KERNEL_INVOCATION_PLACEHOLDER}}", invocation_snippet
+
+    # Use PlaceholderReplacer to replace all placeholders
+    # If no custom replacer provided, use the default one
+    if replacer is None:
+        replacer = DefaultPlaceholderReplacer()
+    final_code = replacer.replace(
+        template_code, context_bundle, temp_json_path=temp_json_path
     )
+
     out_py_path.write_text(final_code, encoding="utf-8")
 
     filepath = context_bundle.kernel_info.file_path
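
A sketch of the updated reproduce() call; the trace path, output directory, and template name are placeholders chosen for illustration:

from tritonparse.reproducer.orchestrator import reproduce

# With replacer=None (the default), DefaultPlaceholderReplacer fills in the
# JSON file name, sys.path setup, kernel import, and kernel invocation.
paths = reproduce(
    input_path="dedicated_log_triton_trace_user.ndjson",  # placeholder trace file
    line_index=0,
    out_dir="repro_out",
    template="example",  # assumes the bundled example template is addressed by name
)
print(paths)
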
tritonparse/reproducer/placeholder_replacer.py ADDED
@@ -0,0 +1,115 @@
+from abc import ABC
+
+from typing import Any, Dict, Protocol
+
+from tritonparse.reproducer.ingestion.ndjson import ContextBundle
+from tritonparse.reproducer.utils import (
+    _generate_import_statements,
+    _generate_invocation_snippet,
+    _parse_kernel_signature,
+)
+
+
+class HandlerProtocol(Protocol):
+    def __call__(
+        self, code: str, context_bundle: ContextBundle, **kwargs: Any
+    ) -> str: ...
+
+
+class PlaceholderReplacer(ABC):
+    """
+    Abstract base class for template placeholder replacement.
+
+    Subclasses should register replacement handlers in their __init__ method
+    by calling self.register(placeholder, handler_function).
+
+    Each handler function should have the signature:
+        handler(code: str, context_bundle: ContextBundle, **kwargs) -> str
+    """
+
+    def __init__(self):
+        # Dictionary mapping placeholder strings to handler functions
+        self.handlers: Dict[str, HandlerProtocol] = {}
+
+    def register(self, placeholder: str, handler: HandlerProtocol):
+        """
+        Register a handler function for a specific placeholder.
+
+        Args:
+            placeholder: The placeholder string to replace (e.g., "{{JSON_FILE_NAME_PLACEHOLDER}}")
+            handler: A callable that takes (code, context_bundle, **kwargs) and returns modified code
+        """
+        self.handlers[placeholder] = handler
+
+    def replace(
+        self, template_code: str, context_bundle: ContextBundle, **kwargs: Any
+    ) -> str:
+        """
+        Replace all registered placeholders in the template code.
+
+        Args:
+            template_code: The template code containing placeholders
+            context_bundle: Context information about the kernel
+            **kwargs: Additional keyword arguments passed to handler functions
+
+        Returns:
+            The code with all placeholders replaced
+        """
+        code = template_code
+        for placeholder, handler in self.handlers.items():
+            code = handler(code, context_bundle, **kwargs)
+        return code
+
+
+class DefaultPlaceholderReplacer(PlaceholderReplacer):
+    """
+    Default implementation of PlaceholderReplacer.
+
+    Handles the following placeholders:
+    - {{JSON_FILE_NAME_PLACEHOLDER}}: Replaced with the JSON file name
+    - # {{KERNEL_SYSPATH_PLACEHOLDER}}: Replaced with sys.path setup code
+    - # {{KERNEL_IMPORT_PLACEHOLDER}}: Replaced with kernel import statement
+    - # {{KERNEL_INVOCATION_PLACEHOLDER}}: Replaced with kernel invocation code
+    """
+
+    def __init__(self):
+        super().__init__()
+        # Register all default handlers
+        self.register("{{JSON_FILE_NAME_PLACEHOLDER}}", self._replace_json_filename)
+        self.register("# {{KERNEL_SYSPATH_PLACEHOLDER}}", self._replace_kernel_syspath)
+        self.register("# {{KERNEL_IMPORT_PLACEHOLDER}}", self._replace_kernel_import)
+        self.register(
+            "# {{KERNEL_INVOCATION_PLACEHOLDER}}", self._replace_kernel_invocation
+        )
+
+    def _replace_json_filename(
+        self, code: str, context_bundle: ContextBundle, **kwargs
+    ) -> str:
+        """Replace the JSON file name placeholder."""
+        temp_json_path = kwargs.get("temp_json_path")
+        if temp_json_path is None:
+            raise ValueError("temp_json_path is required for JSON filename replacement")
+        return code.replace("{{JSON_FILE_NAME_PLACEHOLDER}}", temp_json_path.name)
+
+    def _replace_kernel_syspath(
+        self, code: str, context_bundle: ContextBundle, **kwargs
+    ) -> str:
+        """Replace the kernel sys.path placeholder."""
+        sys_stmt, _ = _generate_import_statements(context_bundle.kernel_info)
+        return code.replace("# {{KERNEL_SYSPATH_PLACEHOLDER}}", sys_stmt)
+
+    def _replace_kernel_import(
+        self, code: str, context_bundle: ContextBundle, **kwargs
+    ) -> str:
+        """Replace the kernel import placeholder."""
+        _, import_statement = _generate_import_statements(context_bundle.kernel_info)
+        return code.replace("# {{KERNEL_IMPORT_PLACEHOLDER}}", import_statement)
+
+    def _replace_kernel_invocation(
+        self, code: str, context_bundle: ContextBundle, **kwargs
+    ) -> str:
+        """Replace the kernel invocation placeholder."""
+        source_code = context_bundle.kernel_info.source_code
+        pos_args, kw_args = _parse_kernel_signature(source_code)
+        invocation_snippet = _generate_invocation_snippet(pos_args, kw_args)
+        return code.replace("# {{KERNEL_INVOCATION_PLACEHOLDER}}", invocation_snippet)
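
The handler registry makes the replacer extensible. Below is a hypothetical subclass that keeps all default handlers and adds one extra placeholder; the placeholder name is invented for illustration and would also need to appear in the template to have any effect:

import datetime

from tritonparse.reproducer.placeholder_replacer import DefaultPlaceholderReplacer


class TimestampReplacer(DefaultPlaceholderReplacer):
    """Default handlers plus one extra, hypothetical placeholder."""

    def __init__(self):
        super().__init__()
        # "# {{GENERATED_AT_PLACEHOLDER}}" is not a real tritonparse placeholder.
        self.register("# {{GENERATED_AT_PLACEHOLDER}}", self._replace_generated_at)

    def _replace_generated_at(self, code, context_bundle, **kwargs):
        stamp = datetime.datetime.now().isoformat()
        return code.replace(
            "# {{GENERATED_AT_PLACEHOLDER}}", f"# reproducer generated at {stamp}"
        )

Such a subclass can then be passed to the orchestrator as reproduce(..., replacer=TimestampReplacer()).
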
tritonparse/reproducer/templates/example.py CHANGED
@@ -88,7 +88,13 @@ def load_tensor(tensor_file_path: str, device: str = None) -> torch.Tensor:
         raise RuntimeError(f"Failed to load tensor from {blob_path}: {str(e)}") from e
 
 
-def create_args_from_json(json_path):
+def create_args_from_json_file(json_path):
+    with open(json_path, "r") as f:
+        data = json.load(f)
+    return create_args_from_json(data)
+
+
+def create_args_from_json(data):
     """
     Parse a reproducer JSON and build kernel grid and argument dictionary.
 
@@ -98,8 +104,6 @@ def create_args_from_json(json_path):
     Returns:
         tuple[list, dict]: Grid specification list and map of argument name to value.
     """
-    with open(json_path, "r") as f:
-        data = json.load(f)
     # Handle data format validation and extraction
     if isinstance(data, list):
         if len(data) != 1:
@@ -139,7 +143,10 @@ def _create_arg_from_info(arg_info):
     """
     arg_type = arg_info.get("type")
 
-    if arg_type in ["int", "bool"]:
+    if arg_type == "NoneType":
+        return None
+
+    if arg_type in ["int", "bool", "str", "float"]:
         return arg_info.get("value")
 
     elif arg_type == "tensor":
@@ -300,7 +307,7 @@ def _create_arg_from_info(arg_info):
 if __name__ == "__main__":
     script_dir = Path(__file__).resolve().parent
     json_file = script_dir / "{{JSON_FILE_NAME_PLACEHOLDER}}"
-    grid, args_dict = create_args_from_json(str(json_file))
+    grid, args_dict = create_args_from_json_file(str(json_file))
 
     print("Generated kernel arguments dictionary:")
     for name, arg in args_dict.items():
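
The split means the JSON-loading step is now separate from the dict-parsing step. A sketch of the two call paths as they would look inside a generated reproducer script, where these helpers are defined; the file name is a placeholder:

import json

# File-based path: open + json.load + delegate.
grid, args_dict = create_args_from_json_file("launch_event.json")  # placeholder name

# Dict-based path: useful when the launch event is already in memory,
# e.g. pulled out of an NDJSON trace by other tooling.
with open("launch_event.json") as f:
    grid, args_dict = create_args_from_json(json.load(f))
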
tritonparse/shared_vars.py CHANGED
@@ -7,3 +7,5 @@ DEFAULT_TRACE_FILE_PREFIX = (
     f"dedicated_log_triton_trace_{os.getenv('USER', 'unknown')}_"
 )
 DEFAULT_TRACE_FILE_PREFIX_WITHOUT_USER = "dedicated_log_triton_trace_"
+# Return True if test outputs (e.g., temp dirs) should be preserved.
+TEST_KEEP_OUTPUT = os.getenv("TEST_KEEP_OUTPUT", "0") in ["1", "true", "True"]
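
A small sketch of how the new flag interacts with the context manager above: the constant is evaluated once at import time, so the environment variable has to be set before tritonparse is imported.

import os

os.environ["TEST_KEEP_OUTPUT"] = "1"  # must happen before the import below

from tritonparse.shared_vars import TEST_KEEP_OUTPUT

# With the flag set, TritonParseManager.__exit__ skips shutil.rmtree(self.dir_path),
# so the raw trace directory survives for inspection.
assert TEST_KEEP_OUTPUT is True
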
tritonparse/structured_logging.py CHANGED
@@ -273,12 +273,15 @@ def _log_torch_tensor_info(tensor_value):
         arg_info["data_ptr"] = hex(tensor_value.data_ptr())
     if TRITONPARSE_MORE_TENSOR_INFORMATION:
         try:
-            arg_info["min"] = tensor_value.min().item()
-            arg_info["max"] = tensor_value.max().item()
-            arg_info["mean"] = tensor_value.float().mean().item()
-            arg_info["std"] = tensor_value.float().std().item()
+            # Convert to float for reliable statistics computation across all dtypes
+            # This creates a new tensor without modifying the original
+            float_tensor = tensor_value.float()
+            arg_info["min"] = float_tensor.min().item()
+            arg_info["max"] = float_tensor.max().item()
+            arg_info["mean"] = float_tensor.mean().item()
+            arg_info["std"] = float_tensor.std().item()
         except (RuntimeError, ValueError, TypeError) as e:
-            log.error(f"Error computing additional tensor statistics: {e}")
+            log.error(f"Unable to compute tensor statistics: {e}")
             arg_info["tensor_capture_error"] = str(e)
     return arg_info
 
@@ -1161,11 +1164,8 @@ def init(
         It only works when enable_trace_launch/TRITON_TRACE_LAUNCH is True.
         enable_sass_dump (Optional[bool]): Whether to enable SASS dumping.
     """
-    global \
-        TRITON_TRACE_LAUNCH, \
-        TRITONPARSE_MORE_TENSOR_INFORMATION, \
-        TORCHINDUCTOR_RUN_JIT_POST_COMPILE_HOOK, \
-        TRITONPARSE_DUMP_SASS
+    global TRITON_TRACE_LAUNCH, TRITONPARSE_MORE_TENSOR_INFORMATION
+    global TORCHINDUCTOR_RUN_JIT_POST_COMPILE_HOOK, TRITONPARSE_DUMP_SASS
     if enable_trace_launch:
         TRITON_TRACE_LAUNCH = True
         TORCHINDUCTOR_RUN_JIT_POST_COMPILE_HOOK = True
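
The float32 copy in _log_torch_tensor_info above matters because PyTorch refuses to compute mean/std directly on integer or boolean tensors. A minimal illustration, independent of tritonparse:

import torch

t = torch.tensor([1, 2, 3], dtype=torch.int64)
# t.mean() raises a RuntimeError for integer dtypes, and t.std() likewise.
ft = t.float()  # new tensor; the original is untouched
print(ft.min().item(), ft.max().item(), ft.mean().item(), ft.std().item())
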
tritonparse/trace_processor.py CHANGED
@@ -198,7 +198,7 @@ def parse_single_trace_content(trace_content: str) -> str:
 def parse_single_file(
     file_path: str,
     output_dir: str = None,
-    split_by_frame_id_and_compile_id: bool = True,
+    split_inductor_compilations: bool = True,
 ):
     """
     Process a single file, correctly group events by kernel, and extract mappings.
@@ -210,8 +210,9 @@ def parse_single_file(
     Args:
         file_path (str): The path to the file to be processed.
         output_dir (str, optional): Directory to save the output files.
-        split_by_frame_id_and_compile_id (bool, optional): Whether to split
-            output files by frame_id and compile_id. Defaults to True.
+        split_inductor_compilations (bool, optional): Whether to split
+            output files by frame_id, compile_id, attempt_id, and compiled_autograd_id.
+            Defaults to True. This rule follows tlparse's behavior.
     """
     kernels_by_hash = defaultdict(
         lambda: {"compilation": None, "launches": [], "output_file": None}
@@ -253,7 +254,9 @@ def parse_single_file(
         if not kernel_hash:
             continue
 
-        if split_by_frame_id_and_compile_id:
+        # Split inductor compilations into separate files
+        # This rule follows tlparse's behavior.
+        if split_inductor_compilations:
             pt_info = payload.get("pt_info", {})
             frame_id = pt_info.get("frame_id")
             frame_compile_id = pt_info.get("frame_compile_id")
tritonparse/utils.py CHANGED
@@ -59,6 +59,7 @@ def oss_run(
     rank: Optional[int] = None,
    all_ranks: bool = False,
     verbose: bool = False,
+    split_inductor_compilations: bool = True,
 ):
     """
     Main function for tritonparse. It is for OSS only.
@@ -97,7 +98,12 @@ def oss_run(
     # Copy the single file to a temp directory, then parse it
     logs = copy_local_to_tmpdir(local_path, verbose)
 
-    parsed_log_dir, _ = parse_logs(logs, rank_config, verbose)
+    parsed_log_dir, _ = parse_logs(
+        logs,
+        rank_config,
+        verbose,
+        split_inductor_compilations=split_inductor_compilations,
+    )
     if out is not None:
         save_logs(Path(out), parsed_log_dir, overwrite, verbose)
     # Print beautiful summary of all parsed files
@@ -116,6 +122,7 @@ def unified_parse(
     rank: Optional[int] = None,
     all_ranks: bool = False,
     verbose: bool = False,
+    split_inductor_compilations: bool = True,
     **kwargs,
 ):
     """
@@ -142,6 +149,7 @@ def unified_parse(
         rank=rank,
         all_ranks=all_ranks,
        verbose=verbose,
+        split_inductor_compilations=split_inductor_compilations,
        **kwargs,
     )
     return output
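
A sketch of calling the updated unified_parse directly; the source directory is a placeholder. The new flag is threaded through parse_logs down to parse_single_file, so passing False disables the per-compilation splitting of output files:

from tritonparse.utils import unified_parse

output_link = unified_parse(
    source="/tmp/tritonparse_traces",   # placeholder trace directory
    overwrite=True,
    split_inductor_compilations=False,  # keep events in unsplit output files
)
print(output_link)
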
tritonparse-0.2.4.dev20251003071457.dist-info/METADATA → tritonparse-0.2.4.dev20251005071455.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: tritonparse
-Version: 0.2.4.dev20251003071457
+Version: 0.2.4.dev20251005071455
 Summary: TritonParse: A Compiler Tracer, Visualizer, and mini-Reproducer Generator for Triton Kernels
 Author-email: Yueming Hao <yhao@meta.com>
 License-Expression: BSD-3-Clause
tritonparse-0.2.4.dev20251003071457.dist-info/RECORD → tritonparse-0.2.4.dev20251005071455.dist-info/RECORD
@@ -1,24 +1,26 @@
 tritonparse/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tritonparse/__main__.py,sha256=YDBolsfpyAINExxCt7CDhHFno6nzEceE9Hzr3BUs6Hg,62
-tritonparse/common.py,sha256=aT7zIPKEiuvTq_MbHgMREuVIz-gVcsRskSDlvuIOHuQ,13662
-tritonparse/context_manager.py,sha256=M2HI4APCOZw3xQ05F1QAJIa89h-RajZByZJWpZDq7vI,596
+tritonparse/common.py,sha256=9coQbzpyHWAAdv6lx2YQTualiIH49ULJuZTA7VB_V7A,13946
+tritonparse/context_manager.py,sha256=M-zRZX8PX8onqBdDeIJ37VGVXmKZ_dFMC6eeZQchyNw,1583
 tritonparse/event_diff.py,sha256=yOD6uNxLJroatfx2nEGr-erw24ObOrHU9P6V5pzr8do,4907
 tritonparse/extract_source_mappings.py,sha256=Z6UxFj2cCE5NCWLQTYPKqUpLfbYhqP8xgCl5mvud9KI,1451
 tritonparse/ir_parser.py,sha256=1j1tP9jpUN7wH3e01bKUkUPgTMlNXUdp8LKRCC-WTro,9324
 tritonparse/mapper.py,sha256=prrczfi13P7Aa042OrEBsmRF1HW3jDhwxicANgPkWIM,4150
-tritonparse/shared_vars.py,sha256=-c9CvXJSDm9spYhDOJPEQProeT_xl3PaNmqTEYi_u4s,505
+tritonparse/shared_vars.py,sha256=fCAW24mx9nENYoNbTy-tZjiN5-w6oGTO_av-Pw1J1TY,653
 tritonparse/source_type.py,sha256=nmYEQS8rfkIN9BhNhQbkmEvKnvS-3zAxRGLY4TaZdi8,1676
 tritonparse/sourcemap_utils.py,sha256=qsQmTDuEe9yuUVyxSHRbjTR38gi0hvJEijnPkrJVAV4,2037
-tritonparse/structured_logging.py,sha256=2M1UwC6eXUMV4ybIQiaibeUgYor2Zjh6S1CVGthOMDs,46720
+tritonparse/structured_logging.py,sha256=y5FKi3mA-iR4SyzCUOfxlZnDpqRhCQC2rKFh2Nrb3kE,46869
 tritonparse/tp_logger.py,sha256=vXzY7hMDmVnRBGBhIjFZe3nHZzG5NKKPONGUszJhGgU,242
-tritonparse/trace_processor.py,sha256=QzUOKwnOkBbwTTKBsa5ZMUABPLMJIBFtTcG2SkhO0I8,12771
-tritonparse/utils.py,sha256=cO3c82PJfToW2pDsVicP3dFh1We3UVv3c3NqC_aTb_g,4312
+tritonparse/trace_processor.py,sha256=brQBt26jdB6-quJXP5-warp2j31JSjOOFJa5ayiUZ5k,12963
+tritonparse/utils.py,sha256=Jnlptcd79llSDev-_1XyyOnv2izUqv0PEL74A8GF2tc,4565
 tritonparse/reproducer/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tritonparse/reproducer/cli.py,sha256=bhtjD3k8pr7l2R2wmoleL-pGer2YndhUaLGnZq4rRBQ,948
-tritonparse/reproducer/orchestrator.py,sha256=Uy3CSntjzgd1VZrsHKARE0XTqBgpUukGo7C8b37m2JA,2640
+tritonparse/reproducer/orchestrator.py,sha256=VliooEC1Z7kXXGiNnbFkDT48tBQ2FFKH0qRs3jlbVmY,2530
+tritonparse/reproducer/placeholder_replacer.py,sha256=x9ddhIXVcYoEf6mBpByECPdZeA44rAXuo6a1cpy3F6c,4500
 tritonparse/reproducer/utils.py,sha256=UTclw48vH49g6Z2ljJL5DOZ6Rl4UDudyr0PeUySa3p8,13857
 tritonparse/reproducer/ingestion/ndjson.py,sha256=pEujTl5xXW2E2DEW8ngxXQ8qP9oawb90wBVTWHDs1jk,7372
-tritonparse/reproducer/templates/example.py,sha256=RExB1HVcHafopic3RF5_T40uNcRKmCMyLc18Bg94p4A,11686
+tritonparse/reproducer/templates/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+tritonparse/reproducer/templates/example.py,sha256=bN75A6sDu9eG7-gePj_VAG5YINVh47rtu8neGp9kV_g,11838
 tritonparse/reproducer/templates/loader.py,sha256=HqjfThdDVg7q2bYWry78sIaVRkUpkcA8KQDt83YrlVE,1920
 tritonparse/tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tritonparse/tools/decompress_bin_ndjson.py,sha256=kpt7DM_sSA334F1X45xdkP2OR9LuB27Pc50EkGr6CPM,4144
@@ -27,9 +29,9 @@ tritonparse/tools/format_fix.py,sha256=Ol0Sjui8D7OzHwbamAfGnq8V5Y63uwNaFTKSORN5H
 tritonparse/tools/load_tensor.py,sha256=tfdmNVd9gsZqO6msQBhbXIhOvUzgc83yF64k2GDWPNk,2122
 tritonparse/tools/prettify_ndjson.py,sha256=r2YlHwFDTHgML7KljRmMsHaDg29q8gOQAgyDKWJhxRM,11062
 tritonparse/tools/readme.md,sha256=w6PWYfYnRgoPArLjxG9rVrpcLUkoVMGuRlbpF-o0IQM,110
-tritonparse-0.2.4.dev20251003071457.dist-info/licenses/LICENSE,sha256=4ZciugpyN7wcM4L-9pyDh_etvMUeIfBhDTyH1zeZlQM,1515
-tritonparse-0.2.4.dev20251003071457.dist-info/METADATA,sha256=kFGnK4LINAdILQ2KBbp7qUqJ_PIeqFT7dkD_57AvR1E,6580
-tritonparse-0.2.4.dev20251003071457.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-tritonparse-0.2.4.dev20251003071457.dist-info/entry_points.txt,sha256=7P8TuH_nMXcPl1r8udA96SW8ccvAznZqTpCWLWDnV2o,53
-tritonparse-0.2.4.dev20251003071457.dist-info/top_level.txt,sha256=ITcTKgp3vf_bXV9vixuQU9IrZa3L1EfDSZwvRzRaoJU,12
-tritonparse-0.2.4.dev20251003071457.dist-info/RECORD,,
+tritonparse-0.2.4.dev20251005071455.dist-info/licenses/LICENSE,sha256=4ZciugpyN7wcM4L-9pyDh_etvMUeIfBhDTyH1zeZlQM,1515
+tritonparse-0.2.4.dev20251005071455.dist-info/METADATA,sha256=umNDdedZg6QDztQzFzobyvI2i9Dr27HCFkLrKjXCjjU,6580
+tritonparse-0.2.4.dev20251005071455.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+tritonparse-0.2.4.dev20251005071455.dist-info/entry_points.txt,sha256=7P8TuH_nMXcPl1r8udA96SW8ccvAznZqTpCWLWDnV2o,53
+tritonparse-0.2.4.dev20251005071455.dist-info/top_level.txt,sha256=ITcTKgp3vf_bXV9vixuQU9IrZa3L1EfDSZwvRzRaoJU,12
+tritonparse-0.2.4.dev20251005071455.dist-info/RECORD,,