ssrjson-benchmark 0.0.3__tar.gz → 0.0.5__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of ssrjson-benchmark might be problematic.

Files changed (35)
  1. {ssrjson_benchmark-0.0.3 → ssrjson_benchmark-0.0.5}/CMakeLists.txt +1 -1
  2. {ssrjson_benchmark-0.0.3/src/ssrjson_benchmark.egg-info → ssrjson_benchmark-0.0.5}/PKG-INFO +4 -25
  3. {ssrjson_benchmark-0.0.3 → ssrjson_benchmark-0.0.5}/README.md +1 -1
  4. {ssrjson_benchmark-0.0.3 → ssrjson_benchmark-0.0.5}/pyproject.toml +7 -7
  5. {ssrjson_benchmark-0.0.3 → ssrjson_benchmark-0.0.5}/setup.py +18 -17
  6. ssrjson_benchmark-0.0.3/src/benchmark.c → ssrjson_benchmark-0.0.5/src/_ssrjson_benchmark.c +36 -61
  7. ssrjson_benchmark-0.0.5/src/ssrjson_benchmark/__init__.py +19 -0
  8. ssrjson_benchmark-0.0.5/src/ssrjson_benchmark/__main__.py +97 -0
  9. ssrjson_benchmark-0.0.5/src/ssrjson_benchmark/benchmark_impl.py +826 -0
  10. ssrjson_benchmark-0.0.5/src/ssrjson_benchmark/result_types.py +88 -0
  11. {ssrjson_benchmark-0.0.3 → ssrjson_benchmark-0.0.5/src/ssrjson_benchmark.egg-info}/PKG-INFO +4 -25
  12. {ssrjson_benchmark-0.0.3 → ssrjson_benchmark-0.0.5}/src/ssrjson_benchmark.egg-info/SOURCES.txt +3 -2
  13. ssrjson_benchmark-0.0.3/src/ssrjson_benchmark/__init__.py +0 -17
  14. ssrjson_benchmark-0.0.3/src/ssrjson_benchmark/__main__.py +0 -54
  15. ssrjson_benchmark-0.0.3/src/ssrjson_benchmark/benchmark_main.py +0 -676
  16. {ssrjson_benchmark-0.0.3 → ssrjson_benchmark-0.0.5}/LICENSE +0 -0
  17. {ssrjson_benchmark-0.0.3 → ssrjson_benchmark-0.0.5}/MANIFEST.in +0 -0
  18. {ssrjson_benchmark-0.0.3 → ssrjson_benchmark-0.0.5}/cmake/XcodeProperty.cmake +0 -0
  19. {ssrjson_benchmark-0.0.3 → ssrjson_benchmark-0.0.5}/setup.cfg +0 -0
  20. {ssrjson_benchmark-0.0.3 → ssrjson_benchmark-0.0.5}/src/ssrjson_benchmark/_files/MotionsQuestionsAnswersQuestions2016.json +0 -0
  21. {ssrjson_benchmark-0.0.3 → ssrjson_benchmark-0.0.5}/src/ssrjson_benchmark/_files/apache.json +0 -0
  22. {ssrjson_benchmark-0.0.3 → ssrjson_benchmark-0.0.5}/src/ssrjson_benchmark/_files/canada.json +0 -0
  23. {ssrjson_benchmark-0.0.3 → ssrjson_benchmark-0.0.5}/src/ssrjson_benchmark/_files/ctm.json +0 -0
  24. {ssrjson_benchmark-0.0.3 → ssrjson_benchmark-0.0.5}/src/ssrjson_benchmark/_files/github.json +0 -0
  25. {ssrjson_benchmark-0.0.3 → ssrjson_benchmark-0.0.5}/src/ssrjson_benchmark/_files/instruments.json +0 -0
  26. {ssrjson_benchmark-0.0.3 → ssrjson_benchmark-0.0.5}/src/ssrjson_benchmark/_files/mesh.json +0 -0
  27. {ssrjson_benchmark-0.0.3 → ssrjson_benchmark-0.0.5}/src/ssrjson_benchmark/_files/simple_object.json +0 -0
  28. {ssrjson_benchmark-0.0.3 → ssrjson_benchmark-0.0.5}/src/ssrjson_benchmark/_files/simple_object_zh.json +0 -0
  29. {ssrjson_benchmark-0.0.3 → ssrjson_benchmark-0.0.5}/src/ssrjson_benchmark/_files/truenull.json +0 -0
  30. {ssrjson_benchmark-0.0.3 → ssrjson_benchmark-0.0.5}/src/ssrjson_benchmark/_files/tweet.json +0 -0
  31. {ssrjson_benchmark-0.0.3 → ssrjson_benchmark-0.0.5}/src/ssrjson_benchmark/_files/twitter.json +0 -0
  32. {ssrjson_benchmark-0.0.3 → ssrjson_benchmark-0.0.5}/src/ssrjson_benchmark/template.md +0 -0
  33. {ssrjson_benchmark-0.0.3 → ssrjson_benchmark-0.0.5}/src/ssrjson_benchmark.egg-info/dependency_links.txt +0 -0
  34. {ssrjson_benchmark-0.0.3 → ssrjson_benchmark-0.0.5}/src/ssrjson_benchmark.egg-info/requires.txt +0 -0
  35. {ssrjson_benchmark-0.0.3 → ssrjson_benchmark-0.0.5}/src/ssrjson_benchmark.egg-info/top_level.txt +0 -0
@@ -59,7 +59,7 @@ message("CMAKE_SYSTEM_PROCESSOR = ${CMAKE_SYSTEM_PROCESSOR}")
 
 # set ssrjson_benchmark src files
 set(SRC_FILES
-    src/benchmark.c
+    src/_ssrjson_benchmark.c
 )
 
 add_library(ssrjson_benchmark SHARED ${SRC_FILES})
@@ -1,30 +1,9 @@
 Metadata-Version: 2.4
 Name: ssrjson-benchmark
-Version: 0.0.3
+Version: 0.0.5
 Summary: benchmark of ssrJSON
 Author-email: Eritque Arcus <eritque-arcus@ikuyo.dev>, Antares <antares0982@gmail.com>
-License: MIT License
-
-Copyright (c) 2025 Eritque arcus
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
-
+License-Expression: MIT
 Project-URL: Homepage, https://github.com/Nambers/ssrJSON-benchmark
 Project-URL: Issues, https://github.com/Nambers/ssrJSON-benchmark/issues
 Project-URL: Repository, https://github.com/Nambers/ssrJSON-benchmark.git
@@ -35,7 +14,7 @@ Classifier: Intended Audience :: Developers
 Classifier: Topic :: System :: Benchmark
 Classifier: Programming Language :: Python :: 3
 Classifier: Operating System :: OS Independent
-Requires-Python: >=3.9
+Requires-Python: >=3.10
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: ssrjson
@@ -86,6 +65,6 @@ python -m ssrjson_benchmark
 ## Notes
 
 * This repository conducts benchmarking using json, [orjson](https://github.com/ijl/orjson), [ujson](https://github.com/ultrajson/ultrajson), and [ssrJSON](https://github.com/Antares0982/ssrjson). The `dumps` benchmark produces str objects, comparing three operations: `json.dumps`, `orjson.dumps` followed by decode, and `ssrjson.dumps`. The `dumps_to_bytes` benchmark produces bytes objects, comparing three functions: `json.dumps` followed by encode, `orjson.dumps`, and `ssrjson.dumps_to_bytes`.
-* When orjson handles non-ASCII strings, if the cache of the `PyUnicodeObject`’s UTF-8 representation does not exist, it invokes the `PyUnicode_AsUTF8AndSize` function to obtain the UTF-8 encoding. This function then caches the UTF-8 representation within the `PyUnicodeObject`. If the same `PyUnicodeObject` undergoes repeated encode-decode operations, subsequent calls after the initial one will execute more quickly due to this caching. However, in real-world production scenarios, it is uncommon to perform JSON encode-decode repeatedly on the exact same string object; even identical strings are unlikely to be the same object instance. To achieve benchmark results that better reflect practical use cases, we employ `ssrjson.run_unicode_accumulate_benchmark` and `benchmark_invalidate_dump_cache` functions, which ensure that new `PyUnicodeObject`s are different for each input every time. (ref: [orjson#586](https://github.com/ijl/orjson/issues/586))
+* When orjson handles non-ASCII strings, if the cache of the `PyUnicodeObject`’s UTF-8 representation does not exist, it invokes the `PyUnicode_AsUTF8AndSize` function to obtain the UTF-8 encoding. This function then caches the UTF-8 representation within the `PyUnicodeObject`. If the same `PyUnicodeObject` undergoes repeated encode-decode operations, subsequent calls after the initial one will execute more quickly due to this caching. However, in real-world production scenarios, it is uncommon to perform JSON encode-decode repeatedly on the exact same string object; even identical strings are unlikely to be the same object instance. To achieve benchmark results that better reflect practical use cases, we employ `ssrjson.run_unicode_accumulate_benchmark` and `_benchmark_invalidate_dump_cache` functions, which ensure that new `PyUnicodeObject`s are different for each input every time. (ref: [orjson#586](https://github.com/ijl/orjson/issues/586))
 * The performance of JSON encoding is primarily constrained by the speed of writing to the buffer, whereas decoding performance is mainly limited by the frequent invocation of CPython interfaces for object creation. During decoding, both ssrJSON and orjson employ short key caching to reduce the number of object creations, and this caching mechanism is global in both cases. As a result, decoding benchmark tests may not accurately reflect the conditions encountered in real-world production environments.
 * The files simple_object.json and simple_object_zh.json do not represent real-world data; they are solely used to compare the performance of the fast path. Therefore, the benchmark results should not be interpreted as indicative of actual performance.
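The `dumps` / `dumps_to_bytes` pairings described in the first bullet, as a minimal sketch (the sample object and the `ensure_ascii=False` flag are illustrative assumptions; the real harness drives these calls through the C extension's timing loops):

```python
import json

import orjson
import ssrjson

obj = {"name": "ssrJSON", "values": [1, 2.5, None, "文字"]}

# "dumps" benchmark: every variant ends with a str result.
s_json = json.dumps(obj, ensure_ascii=False)
s_orjson = orjson.dumps(obj).decode("utf-8")
s_ssrjson = ssrjson.dumps(obj)

# "dumps_to_bytes" benchmark: every variant ends with a bytes result.
b_json = json.dumps(obj, ensure_ascii=False).encode("utf-8")
b_orjson = orjson.dumps(obj)
b_ssrjson = ssrjson.dumps_to_bytes(obj)
```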
@@ -35,6 +35,6 @@ python -m ssrjson_benchmark
 ## Notes
 
 * This repository conducts benchmarking using json, [orjson](https://github.com/ijl/orjson), [ujson](https://github.com/ultrajson/ultrajson), and [ssrJSON](https://github.com/Antares0982/ssrjson). The `dumps` benchmark produces str objects, comparing three operations: `json.dumps`, `orjson.dumps` followed by decode, and `ssrjson.dumps`. The `dumps_to_bytes` benchmark produces bytes objects, comparing three functions: `json.dumps` followed by encode, `orjson.dumps`, and `ssrjson.dumps_to_bytes`.
-* When orjson handles non-ASCII strings, if the cache of the `PyUnicodeObject`’s UTF-8 representation does not exist, it invokes the `PyUnicode_AsUTF8AndSize` function to obtain the UTF-8 encoding. This function then caches the UTF-8 representation within the `PyUnicodeObject`. If the same `PyUnicodeObject` undergoes repeated encode-decode operations, subsequent calls after the initial one will execute more quickly due to this caching. However, in real-world production scenarios, it is uncommon to perform JSON encode-decode repeatedly on the exact same string object; even identical strings are unlikely to be the same object instance. To achieve benchmark results that better reflect practical use cases, we employ `ssrjson.run_unicode_accumulate_benchmark` and `benchmark_invalidate_dump_cache` functions, which ensure that new `PyUnicodeObject`s are different for each input every time. (ref: [orjson#586](https://github.com/ijl/orjson/issues/586))
+* When orjson handles non-ASCII strings, if the cache of the `PyUnicodeObject`’s UTF-8 representation does not exist, it invokes the `PyUnicode_AsUTF8AndSize` function to obtain the UTF-8 encoding. This function then caches the UTF-8 representation within the `PyUnicodeObject`. If the same `PyUnicodeObject` undergoes repeated encode-decode operations, subsequent calls after the initial one will execute more quickly due to this caching. However, in real-world production scenarios, it is uncommon to perform JSON encode-decode repeatedly on the exact same string object; even identical strings are unlikely to be the same object instance. To achieve benchmark results that better reflect practical use cases, we employ `ssrjson.run_unicode_accumulate_benchmark` and `_benchmark_invalidate_dump_cache` functions, which ensure that new `PyUnicodeObject`s are different for each input every time. (ref: [orjson#586](https://github.com/ijl/orjson/issues/586))
 * The performance of JSON encoding is primarily constrained by the speed of writing to the buffer, whereas decoding performance is mainly limited by the frequent invocation of CPython interfaces for object creation. During decoding, both ssrJSON and orjson employ short key caching to reduce the number of object creations, and this caching mechanism is global in both cases. As a result, decoding benchmark tests may not accurately reflect the conditions encountered in real-world production environments.
 * The files simple_object.json and simple_object_zh.json do not represent real-world data; they are solely used to compare the performance of the fast path. Therefore, the benchmark results should not be interpreted as indicative of actual performance.
@@ -1,18 +1,18 @@
 [build-system]
-# requires = ["setuptools >= 77.0.3"]
-requires = ["setuptools >= 64"]
+requires = ["setuptools >= 77.0.3"]
+# requires = ["setuptools >= 64"]
 build-backend = "setuptools.build_meta"
 
 [project]
 name = "ssrjson-benchmark"
-version = "0.0.3"
+version = "0.0.5"
 authors = [
     { name = "Eritque Arcus", email = "eritque-arcus@ikuyo.dev" },
     { name = "Antares", email = "antares0982@gmail.com" },
 ]
 description = "benchmark of ssrJSON"
 readme = "README.md"
-requires-python = ">=3.9"
+requires-python = ">=3.10"
 keywords = ["ssrjson", "benchmark", "json"]
 classifiers = [
     "Development Status :: 5 - Production/Stable",
@@ -23,10 +23,10 @@ classifiers = [
     "Operating System :: OS Independent",
 ]
 # setuptools >= 77
-# license = "MIT"
-# license-files = ["LICENSE"]
+license = "MIT"
+license-files = ["LICENSE"]
 # setuptools < 77 but deprecated
-license = { file = "LICENSE" }
+# license = { file = "LICENSE" }
 dependencies = ["ssrjson", "orjson", "ujson", "matplotlib"]
 
 [project.optional-dependencies]
@@ -6,25 +6,38 @@ from setuptools import Extension, setup
 from setuptools.command.build_ext import build_ext
 
 
+def run_check(cmd):
+    try:
+        subprocess.run(cmd, check=True, capture_output=True, text=True)
+    except subprocess.CalledProcessError as e:
+        print(f"command failed: `{' '.join(cmd)}`")
+        if e.stdout:
+            print(f"stdout: {e.stdout}")
+        print(f"stderr: {e.stderr}")
+        raise
+    except Exception as e:
+        print(f"command failed: `{' '.join(cmd)}`")
+        print(f"error: {e}")
+        raise
+
 class CMakeBuild(build_ext):
     def run(self):
         build_dir = os.path.abspath("build")
         if not os.path.exists(build_dir):
             os.makedirs(build_dir)
-        cmake_cmd = [
+        run_check([
             "cmake",
             "-DCMAKE_BUILD_TYPE=Release",
             ".",
             "-B",
             "build",
-        ]
-        subprocess.check_call(cmake_cmd)
+        ])
 
         if os.name == "nt":
             build_cmd = ["cmake", "--build", "build", "--config", "Release"]
         else:
             build_cmd = ["cmake", "--build", "build"]
-        subprocess.check_call(build_cmd)
+        run_check(build_cmd)
 
         if os.name == "nt":
             built_filename = "Release/_ssrjson_benchmark.dll"
@@ -51,7 +64,7 @@ setup(
     ext_modules=[
         Extension(
             "_ssrjson_benchmark",
-            sources=["src/benchmark.c"],
+            sources=["src/_ssrjson_benchmark.c"],
             language="c",
         )
     ],
@@ -62,18 +75,6 @@ setup(
         "ssrjson_benchmark._files": ["*.json"],
     },
     include_package_data=True,
-    # install_requires=[
-    #     "ssrjson",
-    #     "orjson",
-    #     "matplotlib",
-    # ],
-    # extras_require={
-    #     "all": [
-    #         "svglib",
-    #         "reportlab",
-    #         "py-cpuinfo",
-    #     ],
-    # },
     cmdclass={
         "build_ext": CMakeBuild,
     },
@@ -76,15 +76,15 @@ usize perf_counter(void) {
 
 #endif
 
-typedef struct PyUnicodeNewCallArg {
+typedef struct PyUnicodeCopyInfo {
     Py_ssize_t size;
     int kind;
     Py_UCS4 max_char;
     bool valid;
-} PyUnicodeNewCallArg;
+} PyUnicodeCopyInfo;
 
-PyObject *_copy_unicode(PyObject *unicode, PyUnicodeNewCallArg *call_arg) {
-    if (!call_arg->valid) {
+PyObject *_copy_unicode(PyObject *unicode, PyUnicodeCopyInfo *unicode_copy_info) {
+    if (!unicode_copy_info->valid) {
         // create copy of unicode object.
         int kind = PyUnicode_KIND(unicode);
         Py_UCS4 max_char;
@@ -98,15 +98,16 @@ PyObject *_copy_unicode(PyObject *unicode, PyUnicodeNewCallArg *call_arg) {
             max_char = 0xff;
         }
         //
-        call_arg->size = PyUnicode_GET_LENGTH(unicode);
-        call_arg->kind = kind;
-        call_arg->max_char = max_char;
-        call_arg->valid = true;
+        unicode_copy_info->size = PyUnicode_GET_LENGTH(unicode);
+        unicode_copy_info->kind = kind;
+        unicode_copy_info->max_char = max_char;
+        unicode_copy_info->valid = true;
     }
 
-    PyObject *unicode_copy = PyUnicode_New(call_arg->size, call_arg->max_char);
+    PyObject *unicode_copy = PyUnicode_New(unicode_copy_info->size, unicode_copy_info->max_char);
+    if (!unicode_copy) return NULL;
     memcpy(PyUnicode_DATA(unicode_copy), PyUnicode_DATA(unicode),
-           call_arg->size * call_arg->kind);
+           unicode_copy_info->size * unicode_copy_info->kind);
     return unicode_copy;
 }
 
@@ -129,61 +130,33 @@ PyObject *_parse_additional_args(PyObject *additional_args) {
     return new_args;
 }
 
-PyObject *run_unicode_accumulate_benchmark(PyObject *self, PyObject *args,
-                                           PyObject *kwargs) {
-    PyObject *callable;
-    usize repeat;
-    PyObject *unicode;
-    PyObject *additional_args = NULL;
-    static const char *kwlist[] = {"func", "repeat", "unicode", "args", NULL};
-    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "OKO|O", (char **)kwlist,
-                                     &callable, &repeat, &unicode,
-                                     &additional_args)) {
+PyObject *copy_unicode_list_invalidate_cache(PyObject *self, PyObject *args, PyObject *kwargs) {
+    static const char *kwlist[] = {"s", "size", NULL};
+    PyObject *s;
+    usize size;
+    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "OK", (char **)kwlist, &s, &size)) {
         PyErr_SetString(PyExc_TypeError, "Invalid argument");
-        goto fail;
+        return NULL;
     }
-    if (!PyCallable_Check(callable)) {
-        PyErr_SetString(PyExc_TypeError, "First argument must be callable");
-        goto fail;
+    if (!PyUnicode_CheckExact(s)) {
+        PyErr_SetString(PyExc_TypeError, "First argument must be str, not other types or subclass of str");
+        return NULL;
     }
-    if (!PyUnicode_Check(unicode)) {
-        PyErr_SetString(PyExc_TypeError, "Third argument must be unicode");
-        goto fail;
+    PyObject *ret = PyList_New(size);
+    if (!ret) {
+        return NULL;
     }
-    PyUnicodeNewCallArg call_arg;
-    call_arg.valid = false;
-    usize total = 0;
-    for (usize i = 0; i < repeat; i++) {
-        // create copy of unicode object.
-        PyObject *new_args = _parse_additional_args(additional_args);
-        if (!new_args)
-            goto fail;
-        PyObject *unicode_copy = _copy_unicode(unicode, &call_arg);
-        if (!unicode_copy) {
-            Py_DECREF(new_args);
-            goto fail;
-        }
-        PyTuple_SET_ITEM(new_args, 0, unicode_copy);
-        usize start = perf_counter();
-        PyObject *result = PyObject_Call(callable, new_args, NULL);
-        usize end = perf_counter();
-        assert(unicode_copy->ob_refcnt == 1);
-        Py_DECREF(new_args);
-        unicode_copy = NULL;
-        new_args = NULL;
-        if (unlikely(!result)) {
-            if (!PyErr_Occurred()) {
-                PyErr_SetString(PyExc_RuntimeError, "Failed to call callable");
-            }
-            goto fail;
-        } else {
-            Py_DECREF(result);
+    PyUnicodeCopyInfo unicode_copy_info;
+    unicode_copy_info.valid = false;
+    for (usize i = 0; i < size; i++) {
+        PyObject *s_copy = _copy_unicode(s, &unicode_copy_info);
+        if (!s_copy) {
+            Py_DECREF(ret);
+            return NULL;
         }
-        total += end - start;
+        PyList_SET_ITEM(ret, i, s_copy);
     }
-    return PyLong_FromUnsignedLongLong(total);
-fail:;
-    return NULL;
+    return ret;
 }
 
 PyObject *run_object_accumulate_benchmark(PyObject *self, PyObject *args,
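A hedged sketch of how the new helper might be driven from Python, in the spirit of the README note above: build a list of fresh `str` copies so orjson's per-object UTF-8 cache never carries over between calls, then time the encoder over them. The `import _ssrjson_benchmark` line and the timing loops are assumptions based on the extension name in setup.py, not code shown in this diff.

```python
import time

import orjson
import _ssrjson_benchmark  # module name taken from setup.py's Extension(); import path is an assumption

payload = "非ASCII文字列、キャッシュ無効化テスト" * 256
repeat = 1_000

# One independent PyUnicodeObject per iteration; none of them carries a cached
# UTF-8 buffer, so every orjson.dumps call pays the full encoding cost.
copies = _ssrjson_benchmark.copy_unicode_list_invalidate_cache(payload, repeat)

start = time.perf_counter()
for s in copies:
    orjson.dumps(s)
fresh = time.perf_counter() - start

# For contrast: the same object reused, where the UTF-8 cache is warm after
# the first call (the effect described in orjson#586).
start = time.perf_counter()
for _ in range(repeat):
    orjson.dumps(payload)
cached = time.perf_counter() - start

print(f"fresh copies: {fresh:.4f}s, same object: {cached:.4f}s")
```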
@@ -197,6 +170,7 @@ PyObject *run_object_accumulate_benchmark(PyObject *self, PyObject *args,
         PyErr_SetString(PyExc_TypeError, "Invalid argument");
         goto fail;
     }
+    //
     if (!PyCallable_Check(callable)) {
         PyErr_SetString(PyExc_TypeError, "First argument must be callable");
         goto fail;
@@ -205,6 +179,7 @@ PyObject *run_object_accumulate_benchmark(PyObject *self, PyObject *args,
         PyErr_SetString(PyExc_TypeError, "Third argument must be tuple");
         goto fail;
     }
+    //
     usize total = 0;
     for (usize i = 0; i < repeat; i++) {
         usize start = perf_counter();
@@ -308,7 +283,7 @@ fail:;
 }
 
 static PyMethodDef ssrjson_benchmark_methods[] = {
-    {"run_unicode_accumulate_benchmark", (PyCFunction)run_unicode_accumulate_benchmark, METH_VARARGS | METH_KEYWORDS, "Benchmark."},
+    {"copy_unicode_list_invalidate_cache", (PyCFunction)copy_unicode_list_invalidate_cache, METH_VARARGS | METH_KEYWORDS, "Copy unicode list invalidate cache."},
     {"run_object_accumulate_benchmark", (PyCFunction)run_object_accumulate_benchmark, METH_VARARGS | METH_KEYWORDS, "Benchmark."},
     {"run_object_benchmark", (PyCFunction)run_object_benchmark, METH_VARARGS | METH_KEYWORDS, "Benchmark."},
     {"inspect_pyunicode", (PyCFunction)inspect_pyunicode, METH_VARARGS | METH_KEYWORDS, "Inspect PyUnicode."},
@@ -317,7 +292,7 @@ static PyMethodDef ssrjson_benchmark_methods[] = {
 
 static struct PyModuleDef moduledef = {
     PyModuleDef_HEAD_INIT,
-    "_ssrjson_benchmark", /* m_name */
+    "_ssrjson_benchmark",      /* m_name */
     0,                         /* m_doc */
     0,                         /* m_size */
     ssrjson_benchmark_methods, /* m_methods */
@@ -0,0 +1,19 @@
+from .benchmark_impl import (
+    generate_report_pdf,
+    generate_report_markdown,
+    run_benchmark,
+)
+
+try:
+    from importlib.metadata import version
+
+    __version__ = version("ssrjson-benchmark")
+except Exception:
+    __version__ = "0.0.0"
+
+__all__ = [
+    "run_benchmark",
+    "generate_report_markdown",
+    "generate_report_pdf",
+    "__version__",
+]
@@ -0,0 +1,97 @@
+def main():
+    import argparse
+    import json
+    import os
+    import pathlib
+    import sys
+
+    from .benchmark_impl import (
+        generate_report_markdown,
+        generate_report_pdf,
+        parse_file_result,
+        run_benchmark,
+    )
+
+    parser = argparse.ArgumentParser()
+
+    parser.add_argument(
+        "-f", "--file", help="record JSON file", required=False, default=None
+    )
+    parser.add_argument(
+        "-d",
+        "--in-dir",
+        help="Benchmark JSON files directory",
+        required=False,
+    )
+    parser.add_argument(
+        "-m",
+        "--markdown",
+        help="Generate Markdown report",
+        required=False,
+        action="store_true",
+    )
+    parser.add_argument(
+        "--no-pdf",
+        help="Don't generate PDF report",
+        required=False,
+        action="store_true",
+    )
+    parser.add_argument(
+        "--process-gigabytes",
+        help="Total process gigabytes per test, default 0.1 (float)",
+        required=False,
+        default=0.1,
+        type=float,
+    )
+    parser.add_argument(
+        "--bin-process-megabytes",
+        help="Maximum bytes to process per read for binary formats, default 32 (int)",
+        required=False,
+        default=32,
+        type=int,
+    )
+    parser.add_argument(
+        "--out-dir",
+        help="Output directory for reports",
+        required=False,
+        default=os.getcwd(),
+    )
+    args = parser.parse_args()
+    if args.file and args.no_pdf and not args.markdown:
+        print("Nothing to do.")
+        sys.exit(0)
+
+    _benchmark_files_dir = args.in_dir
+    if not _benchmark_files_dir:
+        _benchmark_files_dir = os.path.join(
+            os.path.dirname(os.path.abspath(__file__)), "_files"
+        )
+    benchmark_files_dir = sorted(pathlib.Path(_benchmark_files_dir).glob("*.json"))
+    if not benchmark_files_dir:
+        print(f"No benchmark file found using given path: {_benchmark_files_dir}")
+        sys.exit(0)
+
+    if args.file:
+        with open(args.file, "rb") as f:
+            result_ = json.load(f)
+        result = parse_file_result(result_)
+        file = args.file.split("/")[-1]
+    else:
+        process_bytes = int(args.process_gigabytes * 1024 * 1024 * 1024)
+        bin_process_bytes = args.bin_process_megabytes * 1024 * 1024
+        if process_bytes <= 0 or bin_process_bytes <= 0:
+            print("process-gigabytes and bin-process-megabytes must be positive.")
+            sys.exit(1)
+        result, file = run_benchmark(
+            benchmark_files_dir, process_bytes, bin_process_bytes
+        )
+        file = file.split("/")[-1]
+
+    if args.markdown:
+        generate_report_markdown(result, file, args.out_dir)
+    if not args.no_pdf:
+        generate_report_pdf(result, file, args.out_dir)
+
+
+if __name__ == "__main__":
+    main()
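A hedged sketch of the same flow driven programmatically instead of through `python -m ssrjson_benchmark`; the argument order is read off the CLI code above, the signatures live in benchmark_impl.py (not shown in this diff), and the input directory is a hypothetical path.

```python
import pathlib

from ssrjson_benchmark import (
    generate_report_markdown,
    generate_report_pdf,
    run_benchmark,
)

# Same defaults the CLI uses: 0.1 GiB processed per test, 32 MiB per binary read.
process_bytes = int(0.1 * 1024 * 1024 * 1024)
bin_process_bytes = 32 * 1024 * 1024

# Point this at any directory of *.json benchmark inputs (hypothetical path).
files = sorted(pathlib.Path("benchmark_inputs").glob("*.json"))

result, record_file = run_benchmark(files, process_bytes, bin_process_bytes)
record_name = record_file.split("/")[-1]

generate_report_markdown(result, record_name, ".")
generate_report_pdf(result, record_name, ".")
```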