ssrjson-benchmark 0.0.2.tar.gz → 0.0.4.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ssrjson_benchmark-0.0.2 → ssrjson_benchmark-0.0.4}/CMakeLists.txt +1 -1
- {ssrjson_benchmark-0.0.2/src/ssrjson_benchmark.egg-info → ssrjson_benchmark-0.0.4}/PKG-INFO +4 -3
- {ssrjson_benchmark-0.0.2 → ssrjson_benchmark-0.0.4}/README.md +2 -2
- {ssrjson_benchmark-0.0.2 → ssrjson_benchmark-0.0.4}/pyproject.toml +2 -2
- {ssrjson_benchmark-0.0.2 → ssrjson_benchmark-0.0.4}/setup.py +1 -13
- ssrjson_benchmark-0.0.2/src/benchmark.c → ssrjson_benchmark-0.0.4/src/_ssrjson_benchmark.c +36 -61
- ssrjson_benchmark-0.0.4/src/ssrjson_benchmark/__init__.py +19 -0
- ssrjson_benchmark-0.0.4/src/ssrjson_benchmark/__main__.py +83 -0
- ssrjson_benchmark-0.0.4/src/ssrjson_benchmark/benchmark_impl.py +759 -0
- ssrjson_benchmark-0.0.4/src/ssrjson_benchmark/result_types.py +88 -0
- {ssrjson_benchmark-0.0.2 → ssrjson_benchmark-0.0.4}/src/ssrjson_benchmark/template.md +1 -0
- {ssrjson_benchmark-0.0.2 → ssrjson_benchmark-0.0.4/src/ssrjson_benchmark.egg-info}/PKG-INFO +4 -3
- {ssrjson_benchmark-0.0.2 → ssrjson_benchmark-0.0.4}/src/ssrjson_benchmark.egg-info/SOURCES.txt +3 -2
- {ssrjson_benchmark-0.0.2 → ssrjson_benchmark-0.0.4}/src/ssrjson_benchmark.egg-info/requires.txt +1 -0
- ssrjson_benchmark-0.0.2/src/ssrjson_benchmark/__init__.py +0 -17
- ssrjson_benchmark-0.0.2/src/ssrjson_benchmark/__main__.py +0 -54
- ssrjson_benchmark-0.0.2/src/ssrjson_benchmark/benchmark_main.py +0 -668
- {ssrjson_benchmark-0.0.2 → ssrjson_benchmark-0.0.4}/LICENSE +0 -0
- {ssrjson_benchmark-0.0.2 → ssrjson_benchmark-0.0.4}/MANIFEST.in +0 -0
- {ssrjson_benchmark-0.0.2 → ssrjson_benchmark-0.0.4}/cmake/XcodeProperty.cmake +0 -0
- {ssrjson_benchmark-0.0.2 → ssrjson_benchmark-0.0.4}/setup.cfg +0 -0
- {ssrjson_benchmark-0.0.2 → ssrjson_benchmark-0.0.4}/src/ssrjson_benchmark/_files/MotionsQuestionsAnswersQuestions2016.json +0 -0
- {ssrjson_benchmark-0.0.2 → ssrjson_benchmark-0.0.4}/src/ssrjson_benchmark/_files/apache.json +0 -0
- {ssrjson_benchmark-0.0.2 → ssrjson_benchmark-0.0.4}/src/ssrjson_benchmark/_files/canada.json +0 -0
- {ssrjson_benchmark-0.0.2 → ssrjson_benchmark-0.0.4}/src/ssrjson_benchmark/_files/ctm.json +0 -0
- {ssrjson_benchmark-0.0.2 → ssrjson_benchmark-0.0.4}/src/ssrjson_benchmark/_files/github.json +0 -0
- {ssrjson_benchmark-0.0.2 → ssrjson_benchmark-0.0.4}/src/ssrjson_benchmark/_files/instruments.json +0 -0
- {ssrjson_benchmark-0.0.2 → ssrjson_benchmark-0.0.4}/src/ssrjson_benchmark/_files/mesh.json +0 -0
- {ssrjson_benchmark-0.0.2 → ssrjson_benchmark-0.0.4}/src/ssrjson_benchmark/_files/simple_object.json +0 -0
- {ssrjson_benchmark-0.0.2 → ssrjson_benchmark-0.0.4}/src/ssrjson_benchmark/_files/simple_object_zh.json +0 -0
- {ssrjson_benchmark-0.0.2 → ssrjson_benchmark-0.0.4}/src/ssrjson_benchmark/_files/truenull.json +0 -0
- {ssrjson_benchmark-0.0.2 → ssrjson_benchmark-0.0.4}/src/ssrjson_benchmark/_files/tweet.json +0 -0
- {ssrjson_benchmark-0.0.2 → ssrjson_benchmark-0.0.4}/src/ssrjson_benchmark/_files/twitter.json +0 -0
- {ssrjson_benchmark-0.0.2 → ssrjson_benchmark-0.0.4}/src/ssrjson_benchmark.egg-info/dependency_links.txt +0 -0
- {ssrjson_benchmark-0.0.2 → ssrjson_benchmark-0.0.4}/src/ssrjson_benchmark.egg-info/top_level.txt +0 -0

{ssrjson_benchmark-0.0.2/src/ssrjson_benchmark.egg-info → ssrjson_benchmark-0.0.4}/PKG-INFO

```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ssrjson-benchmark
-Version: 0.0.2
+Version: 0.0.4
 Summary: benchmark of ssrJSON
 Author-email: Eritque Arcus <eritque-arcus@ikuyo.dev>, Antares <antares0982@gmail.com>
 License: MIT License
@@ -40,6 +40,7 @@ Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: ssrjson
 Requires-Dist: orjson
+Requires-Dist: ujson
 Requires-Dist: matplotlib
 Provides-Extra: pdf
 Requires-Dist: svglib; extra == "pdf"
@@ -84,7 +85,7 @@ python -m ssrjson_benchmark
 
 ## Notes
 
-* This repository conducts benchmarking using json, orjson, and ssrJSON. The `dumps` benchmark produces str objects, comparing three operations: `json.dumps`, `orjson.dumps` followed by decode, and `ssrjson.dumps`. The `dumps_to_bytes` benchmark produces bytes objects, comparing three functions: `json.dumps` followed by encode, `orjson.dumps`, and `ssrjson.dumps_to_bytes`.
-* When orjson handles non-ASCII strings, if the cache of the `PyUnicodeObject`’s UTF-8 representation does not exist, it invokes the `PyUnicode_AsUTF8AndSize` function to obtain the UTF-8 encoding. This function then caches the UTF-8 representation within the `PyUnicodeObject`. If the same `PyUnicodeObject` undergoes repeated encode-decode operations, subsequent calls after the initial one will execute more quickly due to this caching. However, in real-world production scenarios, it is uncommon to perform JSON encode-decode repeatedly on the exact same string object; even identical strings are unlikely to be the same object instance. To achieve benchmark results that better reflect practical use cases, we employ `ssrjson.run_unicode_accumulate_benchmark` and `
+* This repository conducts benchmarking using json, [orjson](https://github.com/ijl/orjson), [ujson](https://github.com/ultrajson/ultrajson), and [ssrJSON](https://github.com/Antares0982/ssrjson). The `dumps` benchmark produces str objects, comparing three operations: `json.dumps`, `orjson.dumps` followed by decode, and `ssrjson.dumps`. The `dumps_to_bytes` benchmark produces bytes objects, comparing three functions: `json.dumps` followed by encode, `orjson.dumps`, and `ssrjson.dumps_to_bytes`.
+* When orjson handles non-ASCII strings, if the cache of the `PyUnicodeObject`’s UTF-8 representation does not exist, it invokes the `PyUnicode_AsUTF8AndSize` function to obtain the UTF-8 encoding. This function then caches the UTF-8 representation within the `PyUnicodeObject`. If the same `PyUnicodeObject` undergoes repeated encode-decode operations, subsequent calls after the initial one will execute more quickly due to this caching. However, in real-world production scenarios, it is uncommon to perform JSON encode-decode repeatedly on the exact same string object; even identical strings are unlikely to be the same object instance. To achieve benchmark results that better reflect practical use cases, we employ `ssrjson.run_unicode_accumulate_benchmark` and `_benchmark_invalidate_dump_cache` functions, which ensure that new `PyUnicodeObject`s are different for each input every time. (ref: [orjson#586](https://github.com/ijl/orjson/issues/586))
 * The performance of JSON encoding is primarily constrained by the speed of writing to the buffer, whereas decoding performance is mainly limited by the frequent invocation of CPython interfaces for object creation. During decoding, both ssrJSON and orjson employ short key caching to reduce the number of object creations, and this caching mechanism is global in both cases. As a result, decoding benchmark tests may not accurately reflect the conditions encountered in real-world production environments.
 * The files simple_object.json and simple_object_zh.json do not represent real-world data; they are solely used to compare the performance of the fast path. Therefore, the benchmark results should not be interpreted as indicative of actual performance.
```
{ssrjson_benchmark-0.0.2 → ssrjson_benchmark-0.0.4}/README.md

```diff
@@ -34,7 +34,7 @@ python -m ssrjson_benchmark
 
 ## Notes
 
-* This repository conducts benchmarking using json, orjson, and ssrJSON. The `dumps` benchmark produces str objects, comparing three operations: `json.dumps`, `orjson.dumps` followed by decode, and `ssrjson.dumps`. The `dumps_to_bytes` benchmark produces bytes objects, comparing three functions: `json.dumps` followed by encode, `orjson.dumps`, and `ssrjson.dumps_to_bytes`.
-* When orjson handles non-ASCII strings, if the cache of the `PyUnicodeObject`’s UTF-8 representation does not exist, it invokes the `PyUnicode_AsUTF8AndSize` function to obtain the UTF-8 encoding. This function then caches the UTF-8 representation within the `PyUnicodeObject`. If the same `PyUnicodeObject` undergoes repeated encode-decode operations, subsequent calls after the initial one will execute more quickly due to this caching. However, in real-world production scenarios, it is uncommon to perform JSON encode-decode repeatedly on the exact same string object; even identical strings are unlikely to be the same object instance. To achieve benchmark results that better reflect practical use cases, we employ `ssrjson.run_unicode_accumulate_benchmark` and `
+* This repository conducts benchmarking using json, [orjson](https://github.com/ijl/orjson), [ujson](https://github.com/ultrajson/ultrajson), and [ssrJSON](https://github.com/Antares0982/ssrjson). The `dumps` benchmark produces str objects, comparing three operations: `json.dumps`, `orjson.dumps` followed by decode, and `ssrjson.dumps`. The `dumps_to_bytes` benchmark produces bytes objects, comparing three functions: `json.dumps` followed by encode, `orjson.dumps`, and `ssrjson.dumps_to_bytes`.
+* When orjson handles non-ASCII strings, if the cache of the `PyUnicodeObject`’s UTF-8 representation does not exist, it invokes the `PyUnicode_AsUTF8AndSize` function to obtain the UTF-8 encoding. This function then caches the UTF-8 representation within the `PyUnicodeObject`. If the same `PyUnicodeObject` undergoes repeated encode-decode operations, subsequent calls after the initial one will execute more quickly due to this caching. However, in real-world production scenarios, it is uncommon to perform JSON encode-decode repeatedly on the exact same string object; even identical strings are unlikely to be the same object instance. To achieve benchmark results that better reflect practical use cases, we employ `ssrjson.run_unicode_accumulate_benchmark` and `_benchmark_invalidate_dump_cache` functions, which ensure that new `PyUnicodeObject`s are different for each input every time. (ref: [orjson#586](https://github.com/ijl/orjson/issues/586))
 * The performance of JSON encoding is primarily constrained by the speed of writing to the buffer, whereas decoding performance is mainly limited by the frequent invocation of CPython interfaces for object creation. During decoding, both ssrJSON and orjson employ short key caching to reduce the number of object creations, and this caching mechanism is global in both cases. As a result, decoding benchmark tests may not accurately reflect the conditions encountered in real-world production environments.
 * The files simple_object.json and simple_object_zh.json do not represent real-world data; they are solely used to compare the performance of the fast path. Therefore, the benchmark results should not be interpreted as indicative of actual performance.
```
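
The str-versus-bytes split described in the Notes maps onto calls roughly like the following. This is an illustrative sketch, not code from the package: it assumes json, orjson, ujson, and ssrjson are importable, and the exact options the benchmark passes (for example to ujson) are not shown in this diff.

```python
import json

import orjson
import ssrjson
import ujson

doc = {"name": "ssrJSON", "values": [1, 2.5, None, "中文"]}

# `dumps` benchmark: every candidate must end up with a str.
str_outputs = {
    "json": json.dumps(doc, ensure_ascii=False),
    "orjson": orjson.dumps(doc).decode("utf-8"),  # orjson returns bytes, so decode
    "ujson": ujson.dumps(doc, ensure_ascii=False),  # ujson is also benchmarked; options here are assumed
    "ssrjson": ssrjson.dumps(doc),
}

# `dumps_to_bytes` benchmark: every candidate must end up with bytes.
bytes_outputs = {
    "json": json.dumps(doc, ensure_ascii=False).encode("utf-8"),
    "orjson": orjson.dumps(doc),
    "ssrjson": ssrjson.dumps_to_bytes(doc),
}
```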
{ssrjson_benchmark-0.0.2 → ssrjson_benchmark-0.0.4}/pyproject.toml

```diff
@@ -5,7 +5,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "ssrjson-benchmark"
-version = "0.0.2"
+version = "0.0.4"
 authors = [
     { name = "Eritque Arcus", email = "eritque-arcus@ikuyo.dev" },
     { name = "Antares", email = "antares0982@gmail.com" },
@@ -27,7 +27,7 @@ classifiers = [
 # license-files = ["LICENSE"]
 # setuptools < 77 but deprecated
 license = { file = "LICENSE" }
-dependencies = ["ssrjson", "orjson", "matplotlib"]
+dependencies = ["ssrjson", "orjson", "ujson", "matplotlib"]
 
 [project.optional-dependencies]
 pdf = ["svglib", "reportlab"]
```
{ssrjson_benchmark-0.0.2 → ssrjson_benchmark-0.0.4}/setup.py

```diff
@@ -51,7 +51,7 @@ setup(
     ext_modules=[
         Extension(
             "_ssrjson_benchmark",
-            sources=["src/benchmark.c"],
+            sources=["src/_ssrjson_benchmark.c"],
             language="c",
         )
     ],
@@ -62,18 +62,6 @@ setup(
         "ssrjson_benchmark._files": ["*.json"],
     },
     include_package_data=True,
-    install_requires=[
-        "ssrjson",
-        "orjson",
-        "matplotlib",
-    ],
-    extras_require={
-        "all": [
-            "svglib",
-            "reportlab",
-            "py-cpuinfo",
-        ],
-    },
     cmdclass={
         "build_ext": CMakeBuild,
     },
```
ssrjson_benchmark-0.0.2/src/benchmark.c → ssrjson_benchmark-0.0.4/src/_ssrjson_benchmark.c

```diff
@@ -76,15 +76,15 @@ usize perf_counter(void) {
 
 #endif
 
-typedef struct
+typedef struct PyUnicodeCopyInfo {
     Py_ssize_t size;
     int kind;
    Py_UCS4 max_char;
     bool valid;
-}
+} PyUnicodeCopyInfo;
 
-PyObject *_copy_unicode(PyObject *unicode, PyUnicodeNewCallArg *call_arg) {
-    if (!call_arg->valid) {
+PyObject *_copy_unicode(PyObject *unicode, PyUnicodeCopyInfo *unicode_copy_info) {
+    if (!unicode_copy_info->valid) {
         // create copy of unicode object.
         int kind = PyUnicode_KIND(unicode);
         Py_UCS4 max_char;
@@ -98,15 +98,16 @@ PyObject *_copy_unicode(PyObject *unicode, PyUnicodeNewCallArg *call_arg) {
             max_char = 0xff;
         }
         //
-
-
-
-
+        unicode_copy_info->size = PyUnicode_GET_LENGTH(unicode);
+        unicode_copy_info->kind = kind;
+        unicode_copy_info->max_char = max_char;
+        unicode_copy_info->valid = true;
     }
 
-    PyObject *unicode_copy = PyUnicode_New(
+    PyObject *unicode_copy = PyUnicode_New(unicode_copy_info->size, unicode_copy_info->max_char);
+    if (!unicode_copy) return NULL;
     memcpy(PyUnicode_DATA(unicode_copy), PyUnicode_DATA(unicode),
-
+           unicode_copy_info->size * unicode_copy_info->kind);
     return unicode_copy;
 }
 
@@ -129,61 +130,33 @@ PyObject *_parse_additional_args(PyObject *additional_args) {
     return new_args;
 }
 
-PyObject *
-
-    PyObject *
-    usize
-
-    PyObject *additional_args = NULL;
-    static const char *kwlist[] = {"func", "repeat", "unicode", "args", NULL};
-    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "OKO|O", (char **)kwlist,
-                                     &callable, &repeat, &unicode,
-                                     &additional_args)) {
+PyObject *copy_unicode_list_invalidate_cache(PyObject *self, PyObject *args, PyObject *kwargs) {
+    static const char *kwlist[] = {"s", "size", NULL};
+    PyObject *s;
+    usize size;
+    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "OK", (char **)kwlist, &s, &size)) {
         PyErr_SetString(PyExc_TypeError, "Invalid argument");
-
+        return NULL;
     }
-    if (!
-        PyErr_SetString(PyExc_TypeError, "First argument must be
-
+    if (!PyUnicode_CheckExact(s)) {
+        PyErr_SetString(PyExc_TypeError, "First argument must be str, not other types or subclass of str");
+        return NULL;
     }
-
-
-
+    PyObject *ret = PyList_New(size);
+    if (!ret) {
+        return NULL;
     }
-
-
-    usize
-
-
-
-
-        goto fail;
-    PyObject *unicode_copy = _copy_unicode(unicode, &call_arg);
-    if (!unicode_copy) {
-        Py_DECREF(new_args);
-        goto fail;
-    }
-    PyTuple_SET_ITEM(new_args, 0, unicode_copy);
-    usize start = perf_counter();
-    PyObject *result = PyObject_Call(callable, new_args, NULL);
-    usize end = perf_counter();
-    assert(unicode_copy->ob_refcnt == 1);
-    Py_DECREF(new_args);
-    unicode_copy = NULL;
-    new_args = NULL;
-    if (unlikely(!result)) {
-        if (!PyErr_Occurred()) {
-            PyErr_SetString(PyExc_RuntimeError, "Failed to call callable");
-        }
-        goto fail;
-    } else {
-        Py_DECREF(result);
+    PyUnicodeCopyInfo unicode_copy_info;
+    unicode_copy_info.valid = false;
+    for (usize i = 0; i < size; i++) {
+        PyObject *s_copy = _copy_unicode(s, &unicode_copy_info);
+        if (!s_copy) {
+            Py_DECREF(ret);
+            return NULL;
         }
-
+        PyList_SET_ITEM(ret, i, s_copy);
     }
-    return
-fail:;
-    return NULL;
+    return ret;
 }
 
 PyObject *run_object_accumulate_benchmark(PyObject *self, PyObject *args,
@@ -197,6 +170,7 @@ PyObject *run_object_accumulate_benchmark(PyObject *self, PyObject *args,
         PyErr_SetString(PyExc_TypeError, "Invalid argument");
         goto fail;
     }
+    //
     if (!PyCallable_Check(callable)) {
         PyErr_SetString(PyExc_TypeError, "First argument must be callable");
         goto fail;
@@ -205,6 +179,7 @@ PyObject *run_object_accumulate_benchmark(PyObject *self, PyObject *args,
         PyErr_SetString(PyExc_TypeError, "Third argument must be tuple");
         goto fail;
     }
+    //
     usize total = 0;
     for (usize i = 0; i < repeat; i++) {
         usize start = perf_counter();
@@ -308,7 +283,7 @@ fail:;
 }
 
 static PyMethodDef ssrjson_benchmark_methods[] = {
-    {"
+    {"copy_unicode_list_invalidate_cache", (PyCFunction)copy_unicode_list_invalidate_cache, METH_VARARGS | METH_KEYWORDS, "Copy unicode list invalidate cache."},
     {"run_object_accumulate_benchmark", (PyCFunction)run_object_accumulate_benchmark, METH_VARARGS | METH_KEYWORDS, "Benchmark."},
     {"run_object_benchmark", (PyCFunction)run_object_benchmark, METH_VARARGS | METH_KEYWORDS, "Benchmark."},
     {"inspect_pyunicode", (PyCFunction)inspect_pyunicode, METH_VARARGS | METH_KEYWORDS, "Inspect PyUnicode."},
@@ -317,7 +292,7 @@ static PyMethodDef ssrjson_benchmark_methods[] = {
 
 static struct PyModuleDef moduledef = {
     PyModuleDef_HEAD_INIT,
-    "_ssrjson_benchmark",
+    "_ssrjson_benchmark", /* m_name */
     0, /* m_doc */
     0, /* m_size */
     ssrjson_benchmark_methods, /* m_methods */
```
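
The `copy_unicode_list_invalidate_cache` helper added above is what lets the Python-side benchmark hand a fresh `PyUnicodeObject` to every call, so orjson's cached UTF-8 representation never carries over between iterations. The sketch below is only illustrative usage under that assumption; it requires the compiled `_ssrjson_benchmark` extension to be importable, and the real timing loop lives in `benchmark_impl.py`, which this diff does not show.

```python
import time

import orjson

import _ssrjson_benchmark  # the C extension built from src/_ssrjson_benchmark.c

s = "ssrJSON 基准测试 " * 64  # non-ASCII payload, so orjson must produce UTF-8
repeat = 10_000

# Fresh, byte-identical copies of s: none of them has a cached UTF-8 representation yet.
copies = _ssrjson_benchmark.copy_unicode_list_invalidate_cache(s, repeat)

start = time.perf_counter()
for c in copies:
    orjson.dumps(c)  # cold UTF-8 cache on every call
cold = time.perf_counter() - start

start = time.perf_counter()
for _ in range(repeat):
    orjson.dumps(s)  # same object: the cache is warm after the first call
warm = time.perf_counter() - start

print(f"fresh objects: {cold:.4f}s, same object: {warm:.4f}s")
```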
ssrjson_benchmark-0.0.4/src/ssrjson_benchmark/__init__.py

```diff
@@ -0,0 +1,19 @@
+from .benchmark_impl import (
+    generate_report_pdf,
+    generate_report_markdown,
+    run_benchmark,
+)
+
+try:
+    from importlib.metadata import version
+
+    __version__ = version("ssrjson-benchmark")
+except Exception:
+    __version__ = "0.0.0"
+
+__all__ = [
+    "run_benchmark",
+    "generate_report_markdown",
+    "generate_report_pdf",
+    "__version__",
+]
```
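
Assuming an installed build, the new package `__init__` can be smoke-tested like this; the printed names come straight from the `__all__` list above.

```python
import ssrjson_benchmark

print(ssrjson_benchmark.__version__)  # from importlib.metadata, or "0.0.0" as a fallback
print(ssrjson_benchmark.__all__)      # run_benchmark, generate_report_markdown, generate_report_pdf, __version__
```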
ssrjson_benchmark-0.0.4/src/ssrjson_benchmark/__main__.py

```diff
@@ -0,0 +1,83 @@
+def main():
+    import argparse
+    import json
+    import os
+    import pathlib
+    import sys
+
+    from .benchmark_impl import (
+        generate_report_markdown,
+        generate_report_pdf,
+        parse_file_result,
+        run_benchmark,
+    )
+
+    parser = argparse.ArgumentParser()
+
+    parser.add_argument(
+        "-f", "--file", help="record JSON file", required=False, default=None
+    )
+    parser.add_argument(
+        "-d",
+        "--in-dir",
+        help="Benchmark JSON files directory",
+        required=False,
+    )
+    parser.add_argument(
+        "-m",
+        "--markdown",
+        help="Generate Markdown report",
+        required=False,
+        action="store_true",
+    )
+    parser.add_argument(
+        "--no-pdf",
+        help="Don't generate PDF report",
+        required=False,
+        action="store_true",
+    )
+    parser.add_argument(
+        "--process-bytes",
+        help="Total process bytes per test, default 1e8",
+        required=False,
+        default=1e8,
+        type=int,
+    )
+    parser.add_argument(
+        "--out-dir",
+        help="Output directory for reports",
+        required=False,
+        default=os.getcwd(),
+    )
+    args = parser.parse_args()
+    if args.file and args.no_pdf and not args.markdown:
+        print("Nothing to do.")
+        sys.exit(0)
+
+    _benchmark_files_dir = args.in_dir
+    if not _benchmark_files_dir:
+        _benchmark_files_dir = os.path.join(
+            os.path.dirname(os.path.abspath(__file__)), "_files"
+        )
+    benchmark_files_dir = sorted(pathlib.Path(_benchmark_files_dir).glob("*.json"))
+    if not benchmark_files_dir:
+        print(f"No benchmark file found using given path: {_benchmark_files_dir}")
+        sys.exit(0)
+
+    if args.file:
+        with open(args.file, "rb") as f:
+            result_ = json.load(f)
+        result = parse_file_result(result_)
+        file = args.file.split("/")[-1]
+    else:
+        result, file = run_benchmark(benchmark_files_dir, args.process_bytes)
+        file = file.split("/")[-1]
+
+    if args.markdown:
+        generate_report_markdown(result, file, args.out_dir)
+    if not args.no_pdf:
+        generate_report_pdf(result, file, args.out_dir)
+
+
+if __name__ == "__main__":
+    main()
```