tritonparse 0.1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of tritonparse might be problematic.

Files changed (40)
  1. tritonparse/__init__.py +0 -0
  2. tritonparse/common.py +409 -0
  3. tritonparse/event_diff.py +120 -0
  4. tritonparse/extract_source_mappings.py +49 -0
  5. tritonparse/ir_parser.py +220 -0
  6. tritonparse/mapper.py +100 -0
  7. tritonparse/reproducer/__init__.py +21 -0
  8. tritonparse/reproducer/__main__.py +81 -0
  9. tritonparse/reproducer/cli.py +37 -0
  10. tritonparse/reproducer/config.py +15 -0
  11. tritonparse/reproducer/factory.py +16 -0
  12. tritonparse/reproducer/ingestion/__init__.py +6 -0
  13. tritonparse/reproducer/ingestion/ndjson.py +165 -0
  14. tritonparse/reproducer/orchestrator.py +65 -0
  15. tritonparse/reproducer/param_generator.py +142 -0
  16. tritonparse/reproducer/prompts/__init__.py +1 -0
  17. tritonparse/reproducer/prompts/loader.py +18 -0
  18. tritonparse/reproducer/providers/__init__.py +1 -0
  19. tritonparse/reproducer/providers/base.py +14 -0
  20. tritonparse/reproducer/providers/gemini.py +47 -0
  21. tritonparse/reproducer/runtime/__init__.py +1 -0
  22. tritonparse/reproducer/runtime/executor.py +13 -0
  23. tritonparse/reproducer/utils/io.py +6 -0
  24. tritonparse/shared_vars.py +9 -0
  25. tritonparse/source_type.py +56 -0
  26. tritonparse/sourcemap_utils.py +72 -0
  27. tritonparse/structured_logging.py +1046 -0
  28. tritonparse/tools/__init__.py +0 -0
  29. tritonparse/tools/decompress_bin_ndjson.py +118 -0
  30. tritonparse/tools/format_fix.py +149 -0
  31. tritonparse/tools/load_tensor.py +58 -0
  32. tritonparse/tools/prettify_ndjson.py +315 -0
  33. tritonparse/tp_logger.py +9 -0
  34. tritonparse/trace_processor.py +331 -0
  35. tritonparse/utils.py +156 -0
  36. tritonparse-0.1.1.dist-info/METADATA +10 -0
  37. tritonparse-0.1.1.dist-info/RECORD +40 -0
  38. tritonparse-0.1.1.dist-info/WHEEL +5 -0
  39. tritonparse-0.1.1.dist-info/licenses/LICENSE +29 -0
  40. tritonparse-0.1.1.dist-info/top_level.txt +1 -0
tritonparse/trace_processor.py ADDED
@@ -0,0 +1,331 @@
+ import gzip
+ import json
+ import logging
+ import os
+ from collections import defaultdict
+ from typing import Any, Dict, List
+
+ from .event_diff import _generate_launch_diff
+
+ from .ir_parser import (
+     extract_code_locations,
+     extract_loc_definitions,
+     extract_ptx_amdgcn_mappings,
+ )
+ from .mapper import create_bidirectional_mapping, create_python_mapping
+ from .sourcemap_utils import get_file_extension
+
+ logger = logging.getLogger("SourceMapping")
+
+
+ def generate_source_mappings(
+     ir_content: str, ir_type: str, other_mappings: List[Any] = None
+ ) -> Dict[str, Dict[str, Any]]:
+     """
+     Generate source mappings from intermediate representation (IR) content to the source file.
+     Example:
+         loc definition: Line 39 in ttir: #loc2 = loc("/tmp/torchinductor_yhao/yp/abcdef.py":20:28)
+         loc reference: Line 9 in ttir: %0 = tt.get_program_id x : i32 loc(#loc2)
+         Then, the output will be:
+         {
+             "9": {
+                 "file": "/tmp/torchinductor_yhao/yp/abcdef.py",
+                 "line": 20,
+                 "column": 28,
+                 "ttir_line": 9
+             },
+         }
+
+     Args:
+         ir_content (str): The content of the intermediate representation.
+         ir_type (str): The type of the intermediate representation (e.g., 'ttir').
+         other_mappings (List[Any]): A collection of additional mappings, primarily utilized for PTX mappings since PTX's location annotations reference the file name instead of the complete path.
+
+     Returns:
+         Dict[str, Dict[str, Any]]: A dictionary mapping line numbers to their corresponding source file,
+             line, column, and the line number in the IR.
+     """
+     if other_mappings is None:
+         other_mappings = []
+     if ir_type == "ptx" or ir_type == "amdgcn":
+         return extract_ptx_amdgcn_mappings(ir_content, other_mappings, ir_type)
+
+     loc_defs = extract_loc_definitions(ir_content)
+     logger.debug(f"Found {len(loc_defs)} #loc definitions")
+
+     loc_refs = extract_code_locations(ir_content)
+     logger.debug(f"Found {len(loc_refs)} loc references")
+
+     mappings = {}
+     for ln, loc_id in loc_refs.items():
+         if loc_id.startswith("direct:"):
+             _, file_path, line, col = loc_id.split(":", 3)
+             mappings[str(ln)] = {
+                 "file": file_path,
+                 "line": int(line),
+                 "column": int(col),
+                 f"{ir_type}_line": ln,
+             }
+         elif loc_id in loc_defs:
+             info = loc_defs[loc_id]
+             mappings[str(ln)] = {
+                 "file": info["file"],
+                 "line": info["line"],
+                 "column": info["column"],
+                 f"{ir_type}_line": ln,
+             }
+
+     return mappings
+
+
+ def process_ir(
+     key: str,
+     file_content: Dict[str, str],
+     file_path: Dict[str, str],
+     other_mappings: List[Any] = None,
+ ):
+     # Generate source mappings for each IR type
+     # the key should be the full file name with extension for the IR files
+     if not key:
+         return {}
+     logger.debug(f"Processing {key}")
+     ir_content = file_content.get(key, None)
+     if not ir_content:
+         ir_file_path = file_path.get(key, None)
+         if not ir_file_path:
+             logger.warning(f"No content found for {key}")
+             return {}
+         with open(ir_file_path, "r") as f:
+             ir_content = f.read()
+     mapping = generate_source_mappings(ir_content, key.split(".")[1], other_mappings)
+     logger.debug(f"Generated source mapping for {key}")
+     return mapping
+
+
+ def parse_single_trace_content(trace_content: str) -> str:
+     """
+     Process a single trace content and extract source code mappings.
+
+     This function takes a trace content as input, extracts the IR files, generates source mappings,
+     creates bidirectional mappings between different IR types, and updates the payload with the mappings.
+
+     Args:
+         trace_content (str): The content of the trace file as a string.
+
+     Returns:
+         str: The updated trace content with source mappings as a JSON string.
+     """
+
+     entry = json.loads(trace_content)
+     if entry.get("event_type") == "compilation":
+         payload = entry.setdefault("payload", {})
+         file_content = payload.get("file_content", {})
+         file_path = payload.get("file_path", {})
+
+         # Find the IR file keys
+         ttir_key = next((k for k in file_content if k.endswith(".ttir")), None)
+         ttgir_key = next((k for k in file_content if k.endswith(".ttgir")), None)
+         ptx_key = next((k for k in file_content if k.endswith(".ptx")), None)
+         amdgcn_key = next((k for k in file_content if k.endswith(".amdgcn")), None)
+         # Skip if no IR files found
+         if not (ttir_key or ttgir_key or ptx_key or amdgcn_key):
+             logger.warning("No IR files found in the payload.")
+             return trace_content
+
+         # generate ttir->source, ttgir->source, ptx->source
+         ttir_map = process_ir(ttir_key, file_content, file_path)
+         ttgir_map = process_ir(ttgir_key, file_content, file_path)
+         ptx_map = process_ir(ptx_key, file_content, file_path, [ttir_map, ttgir_map])
+         amdgcn_map = process_ir(
+             amdgcn_key, file_content, file_path, [ttir_map, ttgir_map]
+         )
+
+         # Create bidirectional mappings between all IR types
+         ir_maps = {
+             "ttir": ttir_map,
+             "ttgir": ttgir_map,
+             "ptx": ptx_map,
+             "amdgcn": amdgcn_map,
+         }
+
+         # Create mappings between all pairs of IR types
+         ir_types = list(ir_maps.keys())
+         for i, src_type in enumerate(ir_types):
+             for tgt_type in ir_types[i + 1 :]:
+                 if ir_maps[src_type] and ir_maps[tgt_type]:
+                     create_bidirectional_mapping(
+                         ir_maps[src_type], ir_maps[tgt_type], src_type, tgt_type
+                     )
+                     logger.debug(
+                         f"Created bidirectional mapping between {src_type} and {tgt_type}"
+                     )
+
+         py_map = {}
+
+         if "python_source" in payload:
+             logger.debug(
+                 f"Added Python source information (lines {payload['python_source']['start_line']}-{payload['python_source']['end_line']})"
+             )
+
+             # 4. Create Python source to IR mappings. We use the original line numbers as key in the python source code.
+             # Create a list of valid IR mappings, filtering out None keys
+             ir_mappings = []
+             ir_keys_and_maps = [
+                 (ttir_key, ttir_map),
+                 (ttgir_key, ttgir_map),
+                 (ptx_key, ptx_map),
+                 (amdgcn_key, amdgcn_map),
+             ]
+
+             for key, mapping in ir_keys_and_maps:
+                 if key:
+                     ir_mappings.append((get_file_extension(key), mapping))
+
+             py_map = create_python_mapping(ir_mappings)
+
+         # Store the mappings in the payload
+         payload["source_mappings"] = {
+             "ttir": ttir_map,
+             "ttgir": ttgir_map,
+             **({"ptx": ptx_map} if ptx_map else {}),
+             **({"amdgcn": amdgcn_map} if amdgcn_map else {}),
+             "python": py_map,
+         }
+     # NDJSON format requires a newline at the end of each line
+     return json.dumps(entry, separators=(",", ":")) + "\n"
+
+
+ def parse_single_file(
+     file_path: str,
+     output_dir: str = None,
+     split_by_frame_id_and_compile_id: bool = True,
+ ):
+     """
+     Process a single file, correctly group events by kernel, and extract mappings.
+
+     This function reads a trace file, groups compilation and launch events by
+     their kernel hash, generates a launch_diff event for each kernel, and writes
+     the processed data to output files.
+
+     Args:
+         file_path (str): The path to the file to be processed.
+         output_dir (str, optional): Directory to save the output files.
+         split_by_frame_id_and_compile_id (bool, optional): Whether to split
+             output files by frame_id and compile_id. Defaults to True.
+     """
+     kernels_by_hash = defaultdict(
+         lambda: {"compilation": None, "launches": [], "output_file": None}
+     )
+
+     output_dir = output_dir or os.path.dirname(file_path)
+     is_compressed_input = file_path.endswith(".bin.ndjson")
+     file_handle = (
+         gzip.open(file_path, "rt", encoding="utf-8")
+         if is_compressed_input
+         else open(file_path, "r")
+     )
+
+     with file_handle as f:
+         file_name = os.path.basename(file_path)
+         file_name_without_extension = (
+             file_name[:-11] if is_compressed_input else os.path.splitext(file_name)[0]
+         )
+
+         for i, line in enumerate(f):
+             logger.debug(f"Processing line {i + 1} in {file_path}")
+             json_str = line.strip()
+             if not json_str:
+                 continue
+
+             # We don't need to generate full mappings for every line here,
+             # just enough to get the event type and necessary IDs.
+             try:
+                 parsed_json = json.loads(json_str)
+             except json.JSONDecodeError:
+                 logger.warning(f"Failed to parse JSON on line {i + 1} in {file_path}")
+                 continue
+
+             event_type = parsed_json.get("event_type", None)
+             payload = parsed_json.get("payload", {})
+
+             if event_type == "compilation":
+                 kernel_hash = payload.get("metadata", {}).get("hash")
+                 if not kernel_hash:
+                     continue
+
+                 if split_by_frame_id_and_compile_id:
+                     pt_info = payload.get("pt_info", {})
+                     frame_id = pt_info.get("frame_id")
+                     frame_compile_id = pt_info.get("frame_compile_id")
+                     attempt_id = pt_info.get("attempt_id", 0)
+                     cai = pt_info.get("compiled_autograd_id", "-")
+                     if frame_id is not None or frame_compile_id is not None:
+                         fname = f"f{frame_id}_fc{frame_compile_id}_a{attempt_id}_cai{cai}.ndjson"
+                     else:
+                         fname = f"{file_name_without_extension}_mapped.ndjson"
+                 else:
+                     fname = f"{file_name_without_extension}_mapped.ndjson"
+
+                 output_file = os.path.join(output_dir, fname)
+                 # The full processing is deferred until the final write.
+                 kernels_by_hash[kernel_hash]["compilation"] = json_str
+                 kernels_by_hash[kernel_hash]["output_file"] = output_file
+
+             elif event_type == "launch":
+                 kernel_hash = parsed_json.get("compilation_metadata", {}).get("hash")
+                 if kernel_hash:
+                     kernels_by_hash[kernel_hash]["launches"].append(
+                         (parsed_json, i + 1)
+                     )
+
+     # Organize lines for final output, keyed by output file path
+     all_output_lines = defaultdict(list)
+     for _kernel_hash, data in kernels_by_hash.items():
+         compilation_json_str = data["compilation"]
+         launches_with_indices = data["launches"]
+         output_file = data["output_file"]
+
+         if not output_file:
+             logger.warning(f"No output file for kernel hash {_kernel_hash}, skipping.")
+             continue
+
+         # Process the compilation event now to include source mappings
+         if compilation_json_str:
+             processed_compilation_line = parse_single_trace_content(
+                 compilation_json_str
+             )
+             all_output_lines[output_file].append(processed_compilation_line)
+             compilation_event = json.loads(processed_compilation_line)
+         else:
+             compilation_event = None
+
+         for launch_event, _ in launches_with_indices:
+             all_output_lines[output_file].append(
+                 json.dumps(launch_event, separators=(",", ":")) + "\n"
+             )
+
+         if compilation_event and launches_with_indices:
+             sames, diffs, launch_index_map = _generate_launch_diff(
+                 launches_with_indices
+             )
+             launch_diff_event = {
+                 "event_type": "launch_diff",
+                 "hash": _kernel_hash,
+                 "name": compilation_event.get("payload", {})
+                 .get("metadata", {})
+                 .get("name"),
+                 "total_launches": len(launches_with_indices),
+                 "launch_index_map": launch_index_map,
+                 "diffs": diffs,
+                 "sames": sames,
+             }
+             all_output_lines[output_file].append(
+                 json.dumps(launch_diff_event, separators=(",", ":")) + "\n"
+             )
+
+     if not os.path.exists(output_dir):
+         os.makedirs(output_dir)
+
+     for output_file, final_lines in all_output_lines.items():
+         with open(output_file, "w") as out:
+             out.writelines(final_lines)
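
The parse_single_file pipeline above buffers compilation events by kernel hash, streams launch events into the same bucket, and defers the expensive source-mapping pass (parse_single_trace_content) until the final write, emitting one launch_diff summary per kernel. A minimal usage sketch follows; the trace path and output directory are hypothetical placeholders, while parse_single_file and its parameters come from the module above:

    # Minimal sketch (hypothetical paths): process one raw NDJSON trace
    # with the parse_single_file pipeline defined above.
    from tritonparse.trace_processor import parse_single_file

    parse_single_file(
        "triton_trace.ndjson",                  # hypothetical input trace file
        output_dir="./parsed",                  # defaults to the input file's directory
        split_by_frame_id_and_compile_id=True,  # split output per frame/compile id when pt_info is present
    )

Each resulting file then holds the mapped compilation event, its launch events, and a trailing launch_diff event.
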
tritonparse/utils.py ADDED
@@ -0,0 +1,156 @@
+ # Copyright (c) Meta Platforms, Inc. and affiliates.
+
+ import argparse
+ import os
+ import shutil
+ from pathlib import Path
+ from typing import Optional
+
+ from .common import (
+     copy_local_to_tmpdir,
+     is_fbcode,
+     parse_logs,
+     print_parsed_files_summary,
+     RankConfig,
+     save_logs,
+ )
+ from .source_type import Source, SourceType
+
+ # argument parser for OSS
+ parser = None
+
+
+ def init_parser():
+     global parser
+
+     parser = argparse.ArgumentParser(
+         description="analyze triton structured logs", conflict_handler="resolve"
+     )
+
+     # Add arguments for the parse command
+     parser.add_argument(
+         "source",
+         help="Source of torch logs to be analyzed. It is expected to be a path to a local directory or log file.",
+     )
+     parser.add_argument(
+         "-o",
+         "--out",
+         help="Output directory.",
+         type=str,
+     )
+     parser.add_argument(
+         "--overwrite",
+         help="Delete the out directory if it already exists. Only has an effect if --out is set.",
+         action="store_true",
+     )
+     parser.add_argument("-r", "--rank", help="Rank of logs to be analyzed", type=int)
+     parser.add_argument(
+         "--all-ranks",
+         help="Analyze all ranks",
+         action="store_true",
+     )
+     parser.add_argument("-v", "--verbose", help="Verbose logging", action="store_true")
+     if is_fbcode():
+         from tritonparse.fb.utils import append_parser
+
+         append_parser(parser)
+     return parser
+
+
+ def oss_run(
+     source: str,
+     out: Optional[str] = None,
+     overwrite: Optional[bool] = False,
+     rank: Optional[int] = None,
+     all_ranks: bool = False,
+     verbose: bool = False,
+ ):
+     """
+     Main function for tritonparse. It is for OSS only.
+
+     Args:
+         source: Source of torch logs to be analyzed (required)
+         out: Output directory
+         overwrite: Delete out directory if it already exists
+         rank: Rank of logs to be analyzed
+         all_ranks: Analyze all ranks
+         verbose: Verbose logging
+     """
+     source = Source(source, verbose)
+     rank_config = RankConfig.from_cli_args(rank, all_ranks, source.type)
+
+     # Check output directory early if specified
+     if out is not None:
+         out_dir = Path(out)
+         if out_dir.exists():
+             if not overwrite:
+                 raise RuntimeError(
+                     f"{out_dir} already exists, pass --overwrite to overwrite"
+                 )
+             shutil.rmtree(out_dir)
+         os.makedirs(out_dir, exist_ok=True)
+
+     # For signpost logging (not implemented in Python version)
+
+     if source.type == SourceType.LOCAL:
+         local_path = source.value
+         # Copy the results to a temp directory, then parse them
+         logs = copy_local_to_tmpdir(local_path, verbose)
+
+     elif source.type == SourceType.LOCAL_FILE:
+         local_path = source.value
+         # Copy the single file to a temp directory, then parse it
+         logs = copy_local_to_tmpdir(local_path, verbose)
+
+     parsed_log_dir, _ = parse_logs(logs, rank_config, verbose)
+     if out is not None:
+         save_logs(Path(out), parsed_log_dir, overwrite, verbose)
+     # Print a summary of all parsed files
+     if out is not None:
+         out_dir = str(Path(out).absolute())
+     else:
+         out_dir = str(Path(parsed_log_dir).absolute())
+     print_parsed_files_summary(out_dir)
+
+
+ def unified_parse_from_cli():
+     parser = init_parser()
+     args = parser.parse_args()
+     return unified_parse(**vars(args))
+
+
+ def unified_parse(
+     source: str,
+     out: Optional[str] = None,
+     overwrite: Optional[bool] = False,
+     rank: Optional[int] = None,
+     all_ranks: bool = False,
+     verbose: bool = False,
+     **kwargs,
+ ):
+     """
+     Unified parse function that provides a flexible interface for parsing triton logs.
+
+     Args:
+         source: Input directory containing logs to parse.
+         out: Output directory for parsed results. By default, parsed logs will be saved to a temporary directory.
+         overwrite: Whether to overwrite the existing output directory
+         rank: Specific rank to analyze
+         all_ranks: Whether to analyze all ranks
+         verbose: Whether to enable verbose logging
+     """
+     # Choose the appropriate parse function
+     if is_fbcode():
+         from tritonparse.fb.utils import fb_run as parse
+     else:
+         parse = oss_run
+
+     parse(
+         source=source,
+         out=out,
+         overwrite=overwrite,
+         rank=rank,
+         all_ranks=all_ranks,
+         verbose=verbose,
+         **kwargs,
+     )
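
unified_parse above is the single programmatic entry point: it dispatches to fb_run inside fbcode and to oss_run otherwise, and unified_parse_from_cli exposes the same flow on the command line. A hedged sketch of the programmatic path; the directory names are hypothetical:

    # Minimal sketch (hypothetical paths): parse a directory of structured
    # Triton logs; unified_parse dispatches to oss_run outside fbcode.
    from tritonparse.utils import unified_parse

    unified_parse(
        source="./triton_logs",   # directory of NDJSON logs to analyze
        out="./parsed_output",    # optional; a temp directory is used when omitted
        overwrite=True,           # required to reuse an existing out directory
    )
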
tritonparse-0.1.1.dist-info/METADATA ADDED
@@ -0,0 +1,10 @@
+ Metadata-Version: 2.4
+ Name: tritonparse
+ Version: 0.1.1
+ Project-URL: Homepage, https://github.com/meta-pytorch/tritonparse
+ Requires-Python: >=3.10
+ License-File: LICENSE
+ Requires-Dist: triton>3.3.1
+ Provides-Extra: test
+ Requires-Dist: coverage>=7.0.0; extra == "test"
+ Dynamic: license-file
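
Since the METADATA above declares the version and dependency pins, a quick sanity check can read them back from an installed copy via the standard library; a sketch, assuming tritonparse 0.1.1 is installed in the current environment:

    # Sketch: read back the wheel metadata shown above from an installed package.
    from importlib.metadata import metadata, requires, version

    assert version("tritonparse") == "0.1.1"
    print(metadata("tritonparse")["Requires-Python"])  # ">=3.10"
    print(requires("tritonparse"))  # includes 'triton>3.3.1' and the "test" extra
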
tritonparse-0.1.1.dist-info/RECORD ADDED
@@ -0,0 +1,40 @@
+ tritonparse/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ tritonparse/common.py,sha256=VWKFZJG7msIExMH0QNYCO8SHKqUBFtdRuLaCy_K8DFI,13725
+ tritonparse/event_diff.py,sha256=yOD6uNxLJroatfx2nEGr-erw24ObOrHU9P6V5pzr8do,4907
+ tritonparse/extract_source_mappings.py,sha256=Z6UxFj2cCE5NCWLQTYPKqUpLfbYhqP8xgCl5mvud9KI,1451
+ tritonparse/ir_parser.py,sha256=1j1tP9jpUN7wH3e01bKUkUPgTMlNXUdp8LKRCC-WTro,9324
+ tritonparse/mapper.py,sha256=prrczfi13P7Aa042OrEBsmRF1HW3jDhwxicANgPkWIM,4150
+ tritonparse/shared_vars.py,sha256=-c9CvXJSDm9spYhDOJPEQProeT_xl3PaNmqTEYi_u4s,505
+ tritonparse/source_type.py,sha256=nmYEQS8rfkIN9BhNhQbkmEvKnvS-3zAxRGLY4TaZdi8,1676
+ tritonparse/sourcemap_utils.py,sha256=qsQmTDuEe9yuUVyxSHRbjTR38gi0hvJEijnPkrJVAV4,2037
+ tritonparse/structured_logging.py,sha256=qWGkAr2oZT8Adxj-EfQqQdX2l-jq9xKi7WhBecYq2bg,39600
+ tritonparse/tp_logger.py,sha256=vXzY7hMDmVnRBGBhIjFZe3nHZzG5NKKPONGUszJhGgU,242
+ tritonparse/trace_processor.py,sha256=QzUOKwnOkBbwTTKBsa5ZMUABPLMJIBFtTcG2SkhO0I8,12771
+ tritonparse/utils.py,sha256=wt61tpbkqjGqHh0c7Nr2WlOv7PbQssmjULd6uA6aAko,4475
+ tritonparse/reproducer/__init__.py,sha256=VcCpYVUUmclWotkQmPLlDu5iFOUE4N-4FzcbzXwIow0,773
+ tritonparse/reproducer/__main__.py,sha256=ydLMbWx7SFlvAb1erObvKcJ-uxhHNShGRHRrZO6-5ww,2266
+ tritonparse/reproducer/cli.py,sha256=nnMgdT4tQzBWjPowOy2a_5QRsnsTMEAA3uegYpLEyRE,1165
+ tritonparse/reproducer/config.py,sha256=-hmE5ZqEtYo2WKjXbwMi6k6XzzfQZAbL50UURPvcF3A,478
+ tritonparse/reproducer/factory.py,sha256=sFcIjIayfHAqPqMVT8Rnsz9tpMmQXBzoOlKprS1P_1g,341
+ tritonparse/reproducer/orchestrator.py,sha256=iD7zZZHE4FU3nNOwNV9SUY2WUcpv_Amg0SvnRxrseEQ,2045
+ tritonparse/reproducer/param_generator.py,sha256=m-C_Z1TLd1ZX49EpsWELVfB6tkwOfi-ZHma7wXwz2g4,4654
+ tritonparse/reproducer/ingestion/__init__.py,sha256=2AQHxWlUl5JXM4a8F033wzxVnjCVPBEf-4H99kep-OA,99
+ tritonparse/reproducer/ingestion/ndjson.py,sha256=_E_dXXjxu438OYomQ1zFFk3jV9Wr1jNoXHiP2gJG7_4,5172
+ tritonparse/reproducer/prompts/__init__.py,sha256=da1PTClDMl-IBkrSvq6JC1lnS-K_BASzCvxVhNxN5Ls,13
+ tritonparse/reproducer/prompts/loader.py,sha256=n6Of98eEXNz9mI7ZH073X5FihNZD7tI-ehfjN_4yEl0,610
+ tritonparse/reproducer/providers/__init__.py,sha256=da1PTClDMl-IBkrSvq6JC1lnS-K_BASzCvxVhNxN5Ls,13
+ tritonparse/reproducer/providers/base.py,sha256=DgP_4AdrEf48kstOfBJFvK3pndcHH0vRUGjp6k1bdsY,362
+ tritonparse/reproducer/providers/gemini.py,sha256=VlOCdTGRTQdr3c2HMclKFIk-133puGSAjhK_6m6Zj9g,1609
+ tritonparse/reproducer/runtime/__init__.py,sha256=da1PTClDMl-IBkrSvq6JC1lnS-K_BASzCvxVhNxN5Ls,13
+ tritonparse/reproducer/runtime/executor.py,sha256=AqBFnoEqURoMGDdLC2G3WpHIP3Y4wWGJHEZrjS-NQFM,304
+ tritonparse/reproducer/utils/io.py,sha256=95NF9QCGawl-5p5c5yCQHynVBNKS_B_7nIrqnRvAt-E,200
+ tritonparse/tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ tritonparse/tools/decompress_bin_ndjson.py,sha256=kpt7DM_sSA334F1X45xdkP2OR9LuB27Pc50EkGr6CPM,4144
+ tritonparse/tools/format_fix.py,sha256=Ol0Sjui8D7OzHwbamAfGnq8V5Y63uwNaFTKSORN5HkQ,3867
+ tritonparse/tools/load_tensor.py,sha256=tfdmNVd9gsZqO6msQBhbXIhOvUzgc83yF64k2GDWPNk,2122
+ tritonparse/tools/prettify_ndjson.py,sha256=VOzVWoXpCbaAXYA4i_wBcQIHfh-JhAx7xR4cF_L8yDs,10928
+ tritonparse-0.1.1.dist-info/licenses/LICENSE,sha256=4ZciugpyN7wcM4L-9pyDh_etvMUeIfBhDTyH1zeZlQM,1515
+ tritonparse-0.1.1.dist-info/METADATA,sha256=qETEInGJRT7fzf-Rl8cAf6QnEa5eJgiYo4rbwBA63yc,287
+ tritonparse-0.1.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ tritonparse-0.1.1.dist-info/top_level.txt,sha256=ITcTKgp3vf_bXV9vixuQU9IrZa3L1EfDSZwvRzRaoJU,12
+ tritonparse-0.1.1.dist-info/RECORD,,
tritonparse-0.1.1.dist-info/WHEEL ADDED
@@ -0,0 +1,5 @@
+ Wheel-Version: 1.0
+ Generator: setuptools (80.9.0)
+ Root-Is-Purelib: true
+ Tag: py3-none-any
+
tritonparse-0.1.1.dist-info/licenses/LICENSE ADDED
@@ -0,0 +1,29 @@
+ BSD 3-Clause License
+
+ Copyright (c) 2019, pytorch
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are met:
+
+ 1. Redistributions of source code must retain the above copyright notice, this
+    list of conditions and the following disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above copyright notice,
+    this list of conditions and the following disclaimer in the documentation
+    and/or other materials provided with the distribution.
+
+ 3. Neither the name of the copyright holder nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+ FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
tritonparse-0.1.1.dist-info/top_level.txt ADDED
@@ -0,0 +1 @@
+ tritonparse