mesh-rb 0.0.1 → 0.0.2

This diff shows the changes between publicly released versions of this package, as they appear in the supported public registries. It is provided for informational purposes only.
Files changed (124)
  1. checksums.yaml +4 -4
  2. data/Gemfile.lock +1 -1
  3. data/ext/mesh/extconf.rb +22 -4
  4. data/ext/mesh/mesh.tar.gz +0 -0
  5. data/lib/mesh/version.rb +1 -1
  6. data/mesh.gemspec +3 -2
  7. metadata +4 -120
  8. data/ext/mesh/mesh/.bazelrc +0 -20
  9. data/ext/mesh/mesh/.bazelversion +0 -1
  10. data/ext/mesh/mesh/.clang-format +0 -15
  11. data/ext/mesh/mesh/.dockerignore +0 -5
  12. data/ext/mesh/mesh/.editorconfig +0 -16
  13. data/ext/mesh/mesh/.gitattributes +0 -4
  14. data/ext/mesh/mesh/.github/workflows/main.yml +0 -144
  15. data/ext/mesh/mesh/.gitignore +0 -51
  16. data/ext/mesh/mesh/AUTHORS +0 -5
  17. data/ext/mesh/mesh/CMakeLists.txt +0 -270
  18. data/ext/mesh/mesh/CODE_OF_CONDUCT.md +0 -77
  19. data/ext/mesh/mesh/Dockerfile +0 -30
  20. data/ext/mesh/mesh/LICENSE +0 -201
  21. data/ext/mesh/mesh/Makefile +0 -81
  22. data/ext/mesh/mesh/README.md +0 -97
  23. data/ext/mesh/mesh/WORKSPACE +0 -50
  24. data/ext/mesh/mesh/bazel +0 -350
  25. data/ext/mesh/mesh/mesh-pldi19-powers.pdf +0 -0
  26. data/ext/mesh/mesh/src/BUILD +0 -222
  27. data/ext/mesh/mesh/src/CMakeLists.txt +0 -85
  28. data/ext/mesh/mesh/src/bitmap.h +0 -590
  29. data/ext/mesh/mesh/src/cheap_heap.h +0 -170
  30. data/ext/mesh/mesh/src/common.h +0 -377
  31. data/ext/mesh/mesh/src/copts.bzl +0 -31
  32. data/ext/mesh/mesh/src/d_assert.cc +0 -75
  33. data/ext/mesh/mesh/src/fixed_array.h +0 -124
  34. data/ext/mesh/mesh/src/global_heap.cc +0 -547
  35. data/ext/mesh/mesh/src/global_heap.h +0 -569
  36. data/ext/mesh/mesh/src/gnu_wrapper.cc +0 -75
  37. data/ext/mesh/mesh/src/internal.h +0 -356
  38. data/ext/mesh/mesh/src/libmesh.cc +0 -239
  39. data/ext/mesh/mesh/src/mac_wrapper.cc +0 -528
  40. data/ext/mesh/mesh/src/measure_rss.cc +0 -44
  41. data/ext/mesh/mesh/src/measure_rss.h +0 -20
  42. data/ext/mesh/mesh/src/meshable_arena.cc +0 -776
  43. data/ext/mesh/mesh/src/meshable_arena.h +0 -309
  44. data/ext/mesh/mesh/src/meshing.h +0 -60
  45. data/ext/mesh/mesh/src/mini_heap.h +0 -532
  46. data/ext/mesh/mesh/src/mmap_heap.h +0 -104
  47. data/ext/mesh/mesh/src/one_way_mmap_heap.h +0 -77
  48. data/ext/mesh/mesh/src/partitioned_heap.h +0 -111
  49. data/ext/mesh/mesh/src/plasma/mesh.h +0 -33
  50. data/ext/mesh/mesh/src/real.cc +0 -52
  51. data/ext/mesh/mesh/src/real.h +0 -36
  52. data/ext/mesh/mesh/src/rng/mwc.h +0 -296
  53. data/ext/mesh/mesh/src/rng/mwc64.h +0 -58
  54. data/ext/mesh/mesh/src/rpl_printf.c +0 -1991
  55. data/ext/mesh/mesh/src/runtime.cc +0 -393
  56. data/ext/mesh/mesh/src/runtime.h +0 -114
  57. data/ext/mesh/mesh/src/shuffle_vector.h +0 -287
  58. data/ext/mesh/mesh/src/size_classes.def +0 -251
  59. data/ext/mesh/mesh/src/static/if.h +0 -36
  60. data/ext/mesh/mesh/src/static/log.h +0 -43
  61. data/ext/mesh/mesh/src/testing/benchmark/local_refill.cc +0 -103
  62. data/ext/mesh/mesh/src/testing/big-alloc.c +0 -28
  63. data/ext/mesh/mesh/src/testing/fragmenter.cc +0 -128
  64. data/ext/mesh/mesh/src/testing/global-large-stress.cc +0 -25
  65. data/ext/mesh/mesh/src/testing/local-alloc.c +0 -16
  66. data/ext/mesh/mesh/src/testing/meshing_benchmark.cc +0 -189
  67. data/ext/mesh/mesh/src/testing/thread.cc +0 -35
  68. data/ext/mesh/mesh/src/testing/unit/alignment.cc +0 -56
  69. data/ext/mesh/mesh/src/testing/unit/bitmap_test.cc +0 -274
  70. data/ext/mesh/mesh/src/testing/unit/concurrent_mesh_test.cc +0 -185
  71. data/ext/mesh/mesh/src/testing/unit/mesh_test.cc +0 -143
  72. data/ext/mesh/mesh/src/testing/unit/rng_test.cc +0 -22
  73. data/ext/mesh/mesh/src/testing/unit/size_class_test.cc +0 -66
  74. data/ext/mesh/mesh/src/testing/unit/triple_mesh_test.cc +0 -285
  75. data/ext/mesh/mesh/src/testing/userfaultfd-kernel-copy.cc +0 -164
  76. data/ext/mesh/mesh/src/thread_local_heap.cc +0 -163
  77. data/ext/mesh/mesh/src/thread_local_heap.h +0 -268
  78. data/ext/mesh/mesh/src/wrapper.cc +0 -433
  79. data/ext/mesh/mesh/support/export_mesh.cmake +0 -28
  80. data/ext/mesh/mesh/support/gen-size-classes +0 -57
  81. data/ext/mesh/mesh/support/install_all_configs +0 -33
  82. data/ext/mesh/mesh/support/remove_export_mesh.cmake +0 -48
  83. data/ext/mesh/mesh/support/update-bazelisk +0 -8
  84. data/ext/mesh/mesh/theory/32m80.png +0 -0
  85. data/ext/mesh/mesh/theory/64m80ind.png +0 -0
  86. data/ext/mesh/mesh/theory/bound_comparison.py +0 -67
  87. data/ext/mesh/mesh/theory/bounds/impdeg+1 +0 -135
  88. data/ext/mesh/mesh/theory/choose.py +0 -43
  89. data/ext/mesh/mesh/theory/common.py +0 -42
  90. data/ext/mesh/mesh/theory/compute_exp_Y.py +0 -134
  91. data/ext/mesh/mesh/theory/createRandomString.py +0 -69
  92. data/ext/mesh/mesh/theory/deg_bound_check.py +0 -100
  93. data/ext/mesh/mesh/theory/degcheck.py +0 -47
  94. data/ext/mesh/mesh/theory/dumps/32,1,80,dumb.txt +0 -81
  95. data/ext/mesh/mesh/theory/dumps/32,2,80,dumb.txt +0 -81
  96. data/ext/mesh/mesh/theory/dumps/32,3,80,dumb.txt +0 -81
  97. data/ext/mesh/mesh/theory/dumps/32,4,80,dumb.txt +0 -81
  98. data/ext/mesh/mesh/theory/dumps/32,5,80,dumb.txt +0 -81
  99. data/ext/mesh/mesh/theory/dumps/32,6,80,dumb.txt +0 -81
  100. data/ext/mesh/mesh/theory/dumps/32,7,80,dumb.txt +0 -81
  101. data/ext/mesh/mesh/theory/dumps/32,8,80,dumb.txt +0 -81
  102. data/ext/mesh/mesh/theory/dumps/32,9,80,dumb.txt +0 -81
  103. data/ext/mesh/mesh/theory/experiment.py +0 -303
  104. data/ext/mesh/mesh/theory/experiment_raw_results/.gitignore +0 -0
  105. data/ext/mesh/mesh/theory/greedy_experiment.py +0 -66
  106. data/ext/mesh/mesh/theory/greedy_experiment_copy.py +0 -46
  107. data/ext/mesh/mesh/theory/greedy_experiment_q.py +0 -75
  108. data/ext/mesh/mesh/theory/makeGraph.py +0 -64
  109. data/ext/mesh/mesh/theory/manyreps.png +0 -0
  110. data/ext/mesh/mesh/theory/manystrings.png +0 -0
  111. data/ext/mesh/mesh/theory/match_vs_color_experiment.py +0 -94
  112. data/ext/mesh/mesh/theory/maxmatch_vs_E[Y].py +0 -162
  113. data/ext/mesh/mesh/theory/maxmatch_vs_greedymatch.py +0 -96
  114. data/ext/mesh/mesh/theory/maxvdeg+1imp++32,80.png +0 -0
  115. data/ext/mesh/mesh/theory/mesh_util.py +0 -322
  116. data/ext/mesh/mesh/theory/meshers.py +0 -452
  117. data/ext/mesh/mesh/theory/meshingBenchmark.py +0 -96
  118. data/ext/mesh/mesh/theory/occupancyComparison.py +0 -133
  119. data/ext/mesh/mesh/theory/randmatch_vs_greedymatch.py +0 -97
  120. data/ext/mesh/mesh/theory/randmatch_vs_greedymatch_q.py +0 -103
  121. data/ext/mesh/mesh/theory/randmatch_vs_greedymatch_time.py +0 -117
  122. data/ext/mesh/mesh/theory/read_mesh_dump.py +0 -82
  123. data/ext/mesh/mesh/theory/test.py +0 -70
  124. data/ext/mesh/mesh/tools/bazel +0 -1
data/ext/mesh/mesh/src/measure_rss.cc
@@ -1,44 +0,0 @@
- // -*- mode: c++; c-basic-offset: 2; indent-tabs-mode: nil -*-
- // Copyright 2019 The Mesh Authors. All rights reserved.
- // Use of this source code is governed by the Apache License,
- // Version 2.0, that can be found in the LICENSE file.
-
- #include <cstdlib>
- #include <cstring>
-
- #include <fcntl.h>
- #include <sys/stat.h>
- #include <sys/types.h>
- #include <unistd.h>
-
- #include "measure_rss.h"
-
- extern "C" int get_rss_kb() {
-   constexpr size_t bufLen = 4096;
-   char buf[bufLen];
-
-   memset(buf, 0, bufLen);
-
-   int fd = open("/proc/self/status", O_RDONLY | O_CLOEXEC);
-   if (fd < 0)
-     return -1;
-
-   ssize_t bytesRead = read(fd, buf, bufLen - 1);
-   close(fd);
-
-   if (bytesRead == -1)
-     return -1;
-
-   for (char *line = buf; line != nullptr && *line != 0; line = strchr(line, '\n')) {
-     if (*line == '\n')
-       line++;
-     if (strncmp(line, "VmRSS:", strlen("VmRSS:")) != 0) {
-       continue;
-     }
-
-     char *rssString = line + strlen("VmRSS:");
-     return atoi(rssString);
-   }
-
-   return -1;
- }
data/ext/mesh/mesh/src/measure_rss.h
@@ -1,20 +0,0 @@
- // -*- mode: c++; c-basic-offset: 2; indent-tabs-mode: nil -*-
- // Copyright 2019 The Mesh Authors. All rights reserved.
- // Use of this source code is governed by the Apache License,
- // Version 2.0, that can be found in the LICENSE file.
-
- #pragma once
- #ifndef MESH_MEASURE_RSS_H
- #define MESH_MEASURE_RSS_H
-
- #ifdef __cplusplus
- extern "C" {
- #endif
-
- int get_rss_kb(void);
-
- #ifdef __cplusplus
- }
- #endif
-
- #endif // MESH_MEASURE_RSS_H
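
For context: get_rss_kb(), whose definition and declaration are removed above, parses the VmRSS line of /proc/self/status and returns the process's resident set size in kilobytes, or -1 on failure. A minimal standalone sketch of how it could be exercised on Linux — the driver below is illustrative only, not part of the gem, and assumes it is compiled together with the measure_rss.cc shown above:

    // demo.cc -- hypothetical driver; build with: g++ demo.cc measure_rss.cc
    #include <cstdio>
    #include <cstdlib>
    #include <cstring>

    extern "C" int get_rss_kb();  // from measure_rss.h above

    int main() {
      const int before = get_rss_kb();

      // touch 8 MiB so the pages actually become resident
      constexpr size_t sz = 8 * 1024 * 1024;
      char *p = static_cast<char *>(malloc(sz));
      memset(p, 1, sz);

      const int after = get_rss_kb();
      printf("RSS before: %d kB, after: %d kB\n", before, after);

      free(p);
      return (before >= 0 && after >= 0) ? 0 : 1;
    }

This mirrors how an RSS helper is typically used: sample before and after an operation and compare the two readings.
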
data/ext/mesh/mesh/src/meshable_arena.cc
@@ -1,776 +0,0 @@
- // -*- mode: c++; c-basic-offset: 2; indent-tabs-mode: nil -*-
- // Copyright 2019 The Mesh Authors. All rights reserved.
- // Use of this source code is governed by the Apache License,
- // Version 2.0, that can be found in the LICENSE file.
-
- #ifdef __linux__
- #define USE_MEMFD 1
- #include <linux/fs.h>
- #endif
- // #undef USE_MEMFD
-
- #ifdef USE_MEMFD
- #include <sys/syscall.h>
- #include <unistd.h>
-
- //#include <sys/memfd.h>
- //#include <asm/unistd_64.h>
- #include <sys/syscall.h>
- #include <linux/memfd.h>
- #endif
-
- #include <sys/ioctl.h>
-
- #include <algorithm>
-
- #include "meshable_arena.h"
- #include "mini_heap.h"
- #include "runtime.h"
-
- namespace mesh {
-
- static void *arenaInstance;
-
- static const char *const TMP_DIRS[] = {
-     "/dev/shm",
-     "/tmp",
- };
-
- MeshableArena::MeshableArena() : SuperHeap(), _fastPrng(internal::seed(), internal::seed()) {
-   d_assert(arenaInstance == nullptr);
-   arenaInstance = this;
-
-   int fd = -1;
-   if (kMeshingEnabled) {
-     fd = openSpanFile(kArenaSize);
-     if (fd < 0) {
-       debug("mesh: opening arena file failed.\n");
-       abort();
-     }
-   }
-   _fd = fd;
-   _arenaBegin = SuperHeap::map(kArenaSize, kMapShared, fd);
-   _mhIndex = reinterpret_cast<atomic<MiniHeapID> *>(SuperHeap::malloc(indexSize()));
-
-   hard_assert(_arenaBegin != nullptr);
-   hard_assert(_mhIndex != nullptr);
-
-   if (kAdviseDump) {
-     madvise(_arenaBegin, kArenaSize, MADV_DONTDUMP);
-   }
-
-   // debug("MeshableArena(%p): fd:%4d\t%p-%p\n", this, fd, _arenaBegin, arenaEnd());
-
-   // TODO: move this to runtime
-   atexit(staticAtExit);
-   pthread_atfork(staticPrepareForFork, staticAfterForkParent, staticAfterForkChild);
- }
-
- char *uintToStr(char *dst, uint32_t i) {
-   constexpr size_t maxLen = sizeof("4294967295") + 1;
-   char buf[maxLen];
-   memset(buf, 0, sizeof(buf));
-
-   char *digit = buf + maxLen - 2;
-   // capture the case where i == 0
-   *digit = '0';
-   while (i > 0) {
-     hard_assert(reinterpret_cast<uintptr_t>(digit) >= reinterpret_cast<uintptr_t>(buf));
-     const char c = '0' + (i % 10);
-     *digit = c;
-     digit--;
-     i /= 10;
-   }
-   if (*digit == '\0') {
-     digit++;
-   }
-
-   return strcat(dst, digit);
- }
-
- char *MeshableArena::openSpanDir(int pid) {
-   constexpr size_t buf_len = 128;
-
-   for (auto tmpDir : TMP_DIRS) {
-     for (size_t i = 0; i < 1024; i++) {
-       char buf[buf_len];
-       memset(buf, 0, buf_len);
-
-       // on some platforms snprintf actually calls out to malloc,
-       // despite us passing in a reasonable buffer. Since what we're doing is
-       // reasonably simple, just build the path ourselves to avoid this.
-       char *next = buf;
-       hard_assert(strlen(tmpDir) < buf_len);
-       next = strcat(next, tmpDir);
-       next = strcat(next, "/alloc-mesh-");
-       next = uintToStr(next, pid);
-       next = strcat(next, ".");
-       next = uintToStr(next, i);
-
-       // ensure we haven't overflown our buffer
-       hard_assert(reinterpret_cast<uintptr_t>(next) <= reinterpret_cast<uintptr_t>(buf) + buf_len);
-
-       int result = mkdir(buf, 0755);
-       if (result != 0) {
-         if (errno == EEXIST) {
-           // we will get EEXIST if we have re-execed -- we need to use a
-           // new directory because we could have dropped privileges in
-           // the meantime.
-           continue;
-         } else {
-           // otherwise it is likely that the parent tmp directory
-           // doesn't exist or we don't have permissions in it.
-           break;
-         }
-       }
-
-       char *spanDir = reinterpret_cast<char *>(internal::Heap().malloc(strlen(buf) + 1));
-       strcpy(spanDir, buf);
-       return spanDir;
-     }
-   }
-
-   return nullptr;
- }
-
- void MeshableArena::expandArena(size_t minPagesAdded) {
-   const size_t pageCount = std::max(minPagesAdded, kMinArenaExpansion);
-
-   Span expansion(_end, pageCount);
-   _end += pageCount;
-
-   if (unlikely(_end >= kArenaSize / kPageSize)) {
-     debug("Mesh: arena exhausted: current arena size is %.1f GB; recompile with larger arena size.",
-           kArenaSize / 1024.0 / 1024.0 / 1024.0);
-     abort();
-   }
-
-   _clean[expansion.spanClass()].push_back(expansion);
- }
-
- bool MeshableArena::findPagesInner(internal::vector<Span> freeSpans[kSpanClassCount], const size_t i,
-                                    const size_t pageCount, Span &result) {
-   internal::vector<Span> &spanList = freeSpans[i];
-   if (spanList.empty())
-     return false;
-
-   size_t oldLen = spanList.size();
-
-   if (i == kSpanClassCount - 1 && spanList.back().length < pageCount) {
-     // the final span class contains (and is the only class to
-     // contain) variable-size spans, so we need to make sure we
-     // search through all candidates in this case.
-     for (size_t j = 0; j < spanList.size() - 1; j++) {
-       if (spanList[j].length >= pageCount) {
-         std::swap(spanList[j], spanList.back());
-         break;
-       }
-     }
-
-     // check that we found something in the above loop. this would be
-     // our last loop iteration anyway
-     if (spanList.back().length < pageCount) {
-       return false;
-     }
-   }
-
-   Span span = spanList.back();
-   spanList.pop_back();
-
- #ifndef NDEBUG
-   d_assert_msg(oldLen == spanList.size() + 1, "pageCount:%zu,%zu -- %zu/%zu", pageCount, i, oldLen, spanList.size());
-   for (size_t j = 0; j < spanList.size(); j++) {
-     d_assert(spanList[j] != span);
-   }
- #endif
-
-   // this invariant should be maintained
-   d_assert(span.length >= i + 1);
-   d_assert(span.length >= pageCount);
-
-   // put the part we don't need back in the reuse pile
-   Span rest = span.splitAfter(pageCount);
-   if (!rest.empty()) {
-     freeSpans[rest.spanClass()].push_back(rest);
-   }
-   d_assert(span.length == pageCount);
-
-   result = span;
-   return true;
- }
-
- bool MeshableArena::findPages(const size_t pageCount, Span &result, internal::PageType &type) {
-   // Search through all dirty spans first. We don't worry about
-   // fragmenting dirty pages, as being able to reuse dirty pages means
-   // we don't increase RSS.
-   for (size_t i = Span(0, pageCount).spanClass(); i < kSpanClassCount; i++) {
-     if (findPagesInner(_dirty, i, pageCount, result)) {
-       type = internal::PageType::Dirty;
-       return true;
-     }
-   }
-
-   // if no dirty pages are available, search clean pages. An allocated
-   // clean page (once it is written to) means an increased RSS.
-   for (size_t i = Span(0, pageCount).spanClass(); i < kSpanClassCount; i++) {
-     if (findPagesInner(_clean, i, pageCount, result)) {
-       type = internal::PageType::Clean;
-       return true;
-     }
-   }
-
-   return false;
- }
-
- Span MeshableArena::reservePages(const size_t pageCount, const size_t pageAlignment) {
-   d_assert(pageCount >= 1);
-
-   internal::PageType flags(internal::PageType::Unknown);
-   Span result(0, 0);
-   auto ok = findPages(pageCount, result, flags);
-   if (!ok) {
-     expandArena(pageCount);
-     ok = findPages(pageCount, result, flags);
-     hard_assert(ok);
-   }
-
-   d_assert(!result.empty());
-   d_assert(flags != internal::PageType::Unknown);
-
-   if (unlikely(pageAlignment > 1 && ((ptrvalFromOffset(result.offset) / kPageSize) % pageAlignment != 0))) {
-     freeSpan(result, flags);
-     // recurse once, asking for enough extra space that we are sure to
-     // be able to find an aligned offset of pageCount pages within.
-     result = reservePages(pageCount + 2 * pageAlignment, 1);
-
-     const size_t alignment = pageAlignment * kPageSize;
-     const uintptr_t alignedPtr = (ptrvalFromOffset(result.offset) + alignment - 1) & ~(alignment - 1);
-     const auto alignedOff = offsetFor(reinterpret_cast<void *>(alignedPtr));
-     d_assert(alignedOff >= result.offset);
-     d_assert(alignedOff < result.offset + result.length);
-     const auto unwantedPageCount = alignedOff - result.offset;
-     auto alignedResult = result.splitAfter(unwantedPageCount);
-     d_assert(alignedResult.offset == alignedOff);
-     freeSpan(result, flags);
-     const auto excess = alignedResult.splitAfter(pageCount);
-     freeSpan(excess, flags);
-     result = alignedResult;
-   }
-
-   return result;
- }
-
- template <typename Func>
- static void forEachFree(const internal::vector<Span> freeSpans[kSpanClassCount], const Func func) {
-   for (size_t i = 0; i < kSpanClassCount; i++) {
-     if (freeSpans[i].empty())
-       continue;
-
-     for (size_t j = 0; j < freeSpans[i].size(); j++) {
-       auto span = freeSpans[i][j];
-       func(span);
-     }
-   }
- }
-
- internal::RelaxedBitmap MeshableArena::allocatedBitmap(bool includeDirty) const {
-   internal::RelaxedBitmap bitmap(_end);
-
-   // we can build up a bitmap of in-use pages here by looking at the
-   // arena start and end addresses (to compute the number of
-   // bits/pages), set all bits to 1, then iterate through our _clean
-   // and _dirty lists unsetting pages that aren't in use.
-
-   bitmap.setAll(_end);
-
-   auto unmarkPages = [&](const Span &span) {
-     for (size_t k = 0; k < span.length; k++) {
- #ifndef NDEBUG
-       if (!bitmap.isSet(span.offset + k)) {
-         debug("arena: bit %zu already unset 1 (%zu/%zu)\n", k, span.offset, span.length);
-       }
- #endif
-       bitmap.unset(span.offset + k);
-     }
-   };
-
-   if (includeDirty)
-     forEachFree(_dirty, unmarkPages);
-   forEachFree(_clean, unmarkPages);
-
-   return bitmap;
- }
-
- char *MeshableArena::pageAlloc(Span &result, size_t pageCount, size_t pageAlignment) {
-   if (pageCount == 0) {
-     return nullptr;
-   }
-
-   d_assert(_arenaBegin != nullptr);
-
-   d_assert(pageCount >= 1);
-   d_assert(pageCount < std::numeric_limits<Length>::max());
-
-   auto span = reservePages(pageCount, pageAlignment);
-   d_assert(isAligned(span, pageAlignment));
-
-   d_assert(contains(ptrFromOffset(span.offset)));
- #ifndef NDEBUG
-   if (_mhIndex[span.offset].load().hasValue()) {
-     mesh::debug("----\n");
-     auto mh = reinterpret_cast<MiniHeap *>(miniheapForArenaOffset(span.offset));
-     mh->dumpDebug();
-   }
- #endif
-
-   char *ptr = reinterpret_cast<char *>(ptrFromOffset(span.offset));
-
-   if (kAdviseDump) {
-     madvise(ptr, pageCount * kPageSize, MADV_DODUMP);
-   }
-
-   result = span;
-   return ptr;
- }
-
- void MeshableArena::free(void *ptr, size_t sz, internal::PageType type) {
-   if (unlikely(!contains(ptr))) {
-     debug("invalid free of %p/%zu", ptr, sz);
-     return;
-   }
-   d_assert(sz > 0);
-
-   d_assert(sz / kPageSize > 0);
-   d_assert(sz % kPageSize == 0);
-
-   const Span span(offsetFor(ptr), sz / kPageSize);
-   freeSpan(span, type);
- }
-
- void MeshableArena::partialScavenge() {
-   forEachFree(_dirty, [&](const Span &span) {
-     auto ptr = ptrFromOffset(span.offset);
-     auto sz = span.byteLength();
-     madvise(ptr, sz, MADV_DONTNEED);
-     freePhys(ptr, sz);
-     // don't coalesce, just add to clean
-     _clean[span.spanClass()].push_back(span);
-   });
-
-   for (size_t i = 0; i < kSpanClassCount; i++) {
-     _dirty[i].clear();
-     internal::vector<Span> empty{};
-     _dirty[i].swap(empty);
-   }
-
-   _dirtyPageCount = 0;
- }
-
- void MeshableArena::scavenge(bool force) {
-   if (!force && _dirtyPageCount < kMinDirtyPageThreshold) {
-     return;
-   }
-
-   // the inverse of the allocated bitmap is all of the spans in _clear
-   // (since we just MADV_DONTNEED'ed everything in dirty)
-   auto bitmap = allocatedBitmap(false);
-   bitmap.invert();
-
-   auto markPages = [&](const Span &span) {
-     for (size_t k = 0; k < span.length; k++) {
- #ifndef NDEBUG
-       if (bitmap.isSet(span.offset + k)) {
-         debug("arena: bit %zu already set (%zu/%zu) %zu\n", k, span.offset, span.length, bitmap.bitCount());
-       }
- #endif
-       bitmap.tryToSet(span.offset + k);
-     }
-   };
-
-   // first, untrack the spans in the meshed bitmap and mark them in
-   // the (method-local) unallocated bitmap
-   std::for_each(_toReset.begin(), _toReset.end(), [&](Span span) {
-     untrackMeshed(span);
-     markPages(span);
-     resetSpanMapping(span);
-   });
-
-   // now that we've finally reset to identity all delayed-reset
-   // mappings, empty the list
-   _toReset.clear();
-   {
-     // force freeing our internal allocations
-     internal::vector<Span> empty{};
-     _toReset.swap(empty);
-   }
-
-   _meshedPageCount = _meshedBitmap.inUseCount();
-   if (_meshedPageCount > _meshedPageCountHWM) {
-     _meshedPageCountHWM = _meshedPageCount;
-     // TODO: find rss at peak
-   }
-
-   forEachFree(_dirty, [&](const Span &span) {
-     auto ptr = ptrFromOffset(span.offset);
-     auto sz = span.byteLength();
-     madvise(ptr, sz, MADV_DONTNEED);
-     freePhys(ptr, sz);
-     markPages(span);
-   });
-
-   for (size_t i = 0; i < kSpanClassCount; i++) {
-     _dirty[i].clear();
-     internal::vector<Span> empty{};
-     _dirty[i].swap(empty);
-   }
-
-   _dirtyPageCount = 0;
-
-   for (size_t i = 0; i < kSpanClassCount; i++) {
-     _clean[i].clear();
-     internal::vector<Span> empty{};
-     _clean[i].swap(empty);
-   }
-
-   // coalesce adjacent spans
-   Span current(0, 0);
-   for (auto const &i : bitmap) {
-     if (i == current.offset + current.length) {
-       current.length++;
-       continue;
-     }
-
-     // should only be empty the first time/iteration through
-     if (!current.empty()) {
-       _clean[current.spanClass()].push_back(current);
-       // debug(" clean: %4zu/%4zu\n", current.offset, current.length);
-     }
-
-     current = Span(i, 1);
-   }
-
-   // should only be empty the first time/iteration through
-   if (!current.empty()) {
-     _clean[current.spanClass()].push_back(current);
-     // debug(" clean: %4zu/%4zu\n", current.offset, current.length);
-   }
- #ifndef NDEBUG
-   auto newBitmap = allocatedBitmap();
-   newBitmap.invert();
-
-   const size_t *bits1 = bitmap.bits();
-   const size_t *bits2 = newBitmap.bits();
-   for (size_t i = 0; i < bitmap.byteCount() / sizeof(size_t); i++) {
-     if (bits1[i] != bits2[i]) {
-       debug("bitmaps don't match %zu:\n", i);
-       // debug("\t%s\n", bitmap.to_string().c_str());
-       // debug("\t%s\n", newBitmap.to_string().c_str());
-       hard_assert(false);
-     }
-   }
- #endif
- }
-
- void MeshableArena::freePhys(void *ptr, size_t sz) {
-   d_assert(contains(ptr));
-   d_assert(sz > 0);
-
-   d_assert(sz / CPUInfo::PageSize > 0);
-   d_assert(sz % CPUInfo::PageSize == 0);
-
-   // we madvise(MADV_DONTNEED) elsewhere; this function is only needed
-   // when our heap is a shared mapping
-   if (!kMeshingEnabled) {
-     return;
-   }
-
-   const off_t off = reinterpret_cast<char *>(ptr) - reinterpret_cast<char *>(_arenaBegin);
- #ifndef __APPLE__
-   int result = fallocate(_fd, FALLOC_FL_PUNCH_HOLE | FALLOC_FL_KEEP_SIZE, off, sz);
-   d_assert_msg(result == 0, "result(fd %d): %d errno %d (%s)\n", _fd, result, errno, strerror(errno));
- #else
- #warning macOS version of fallocate goes here
-   fstore_t store = {F_ALLOCATECONTIG, F_PEOFPOSMODE, 0, (long long)sz, 0};
-   int result = fcntl(_fd, F_PREALLOCATE, &store);
-   if (result == -1) {
-     // try and allocate space with fragments
-     store.fst_flags = F_ALLOCATEALL;
-     result = fcntl(_fd, F_PREALLOCATE, &store);
-   }
-   // if (result != -1) {
-   //   result = ftruncate(_fd, off+sz);
-   // }
-   d_assert(result == 0);
- #endif
- }
-
- void MeshableArena::beginMesh(void *keep, void *remove, size_t sz) {
-   int r = mprotect(remove, sz, PROT_READ);
-   hard_assert(r == 0);
- }
-
- void MeshableArena::finalizeMesh(void *keep, void *remove, size_t sz) {
-   // debug("keep: %p, remove: %p\n", keep, remove);
-   const auto keepOff = offsetFor(keep);
-   const auto removeOff = offsetFor(remove);
-
-   const size_t pageCount = sz / kPageSize;
-   const MiniHeapID keepID = _mhIndex[keepOff].load(std::memory_order_acquire);
-   for (size_t i = 0; i < pageCount; i++) {
-     setIndex(removeOff + i, keepID);
-   }
-
-   hard_assert(pageCount < std::numeric_limits<Length>::max());
-   const Span removedSpan{removeOff, static_cast<Length>(pageCount)};
-   trackMeshed(removedSpan);
-
-   void *ptr = mmap(remove, sz, HL_MMAP_PROTECTION_MASK, kMapShared | MAP_FIXED, _fd, keepOff * kPageSize);
-   hard_assert_msg(ptr != MAP_FAILED, "mesh remap failed: %d", errno);
- }
-
- int MeshableArena::openShmSpanFile(size_t sz) {
-   constexpr size_t buf_len = 64;
-   char buf[buf_len];
-   memset(buf, 0, buf_len);
-
-   _spanDir = openSpanDir(getpid());
-   d_assert(_spanDir != nullptr);
-
-   char *next = strcat(buf, _spanDir);
-   strcat(next, "/XXXXXX");
-
-   int fd = mkstemp(buf);
-   if (fd < 0) {
-     debug("mkstemp: %d (%s)\n", errno, strerror(errno));
-     abort();
-   }
-
-   // we only need the file descriptors, not the path to the file in the FS
-   int err = unlink(buf);
-   if (err != 0) {
-     debug("unlink: %d\n", errno);
-     abort();
-   }
-
-   // TODO: see if fallocate makes any difference in performance
-   err = ftruncate(fd, sz);
-   if (err != 0) {
-     debug("ftruncate: %d\n", errno);
-     abort();
-   }
-
-   // if a new process gets exec'ed, ensure our heap is completely freed.
-   err = fcntl(fd, F_SETFD, FD_CLOEXEC);
-   if (err != 0) {
-     debug("fcntl: %d\n", errno);
-     abort();
-   }
-
-   return fd;
- }
-
- #ifdef USE_MEMFD
- static int sys_memfd_create(const char *name, unsigned int flags) {
-   return syscall(__NR_memfd_create, name, flags);
- }
-
- int MeshableArena::openSpanFile(size_t sz) {
-   errno = 0;
-   int fd = sys_memfd_create("mesh_arena", MFD_CLOEXEC);
-   // the call to memfd failed -- fall back to opening a shm file
-   if (fd < 0) {
-     return openShmSpanFile(sz);
-   }
-
-   int err = ftruncate(fd, sz);
-   if (err != 0) {
-     debug("ftruncate: %d\n", errno);
-     abort();
-   }
-
-   return fd;
- }
- #else
- int MeshableArena::openSpanFile(size_t sz) {
-   return openShmSpanFile(sz);
- }
- #endif // USE_MEMFD
-
- void MeshableArena::staticAtExit() {
-   d_assert(arenaInstance != nullptr);
-   if (arenaInstance != nullptr)
-     reinterpret_cast<MeshableArena *>(arenaInstance)->exit();
- }
-
- void MeshableArena::staticPrepareForFork() {
-   d_assert(arenaInstance != nullptr);
-   reinterpret_cast<MeshableArena *>(arenaInstance)->prepareForFork();
- }
-
- void MeshableArena::staticAfterForkParent() {
-   d_assert(arenaInstance != nullptr);
-   reinterpret_cast<MeshableArena *>(arenaInstance)->afterForkParent();
- }
-
- void MeshableArena::staticAfterForkChild() {
-   d_assert(arenaInstance != nullptr);
-   reinterpret_cast<MeshableArena *>(arenaInstance)->afterForkChild();
- }
-
- void MeshableArena::prepareForFork() {
-   if (!kMeshingEnabled) {
-     return;
-   }
-
-   // debug("%d: prepare fork", getpid());
-   runtime().heap().lock();
-   runtime().lock();
-   internal::Heap().lock();
-
-   int r = mprotect(_arenaBegin, kArenaSize, PROT_READ);
-   hard_assert(r == 0);
-
-   int err = pipe(_forkPipe);
-   if (err == -1) {
-     abort();
-   }
- }
-
- void MeshableArena::afterForkParent() {
-   if (!kMeshingEnabled) {
-     return;
-   }
-
-   internal::Heap().unlock();
-
-   close(_forkPipe[1]);
-
-   char buf[8];
-   memset(buf, 0, 8);
-
-   // wait for our child to close + reopen memory. Without this
-   // fence, we may experience memory corruption?
-
-   while (read(_forkPipe[0], buf, 4) == EAGAIN) {
-   }
-   close(_forkPipe[0]);
-
-   d_assert(strcmp(buf, "ok") == 0);
-
-   _forkPipe[0] = -1;
-   _forkPipe[1] = -1;
-
-   // only after the child has finished copying the heap is it safe to
-   // go back to read/write
-   int r = mprotect(_arenaBegin, kArenaSize, PROT_READ | PROT_WRITE);
-   hard_assert(r == 0);
-
-   // debug("%d: after fork parent", getpid());
-   runtime().unlock();
-   runtime().heap().unlock();
- }
-
- void MeshableArena::doAfterForkChild() {
-   afterForkChild();
- }
-
- void MeshableArena::afterForkChild() {
-   runtime().updatePid();
-
-   if (!kMeshingEnabled) {
-     return;
-   }
-
-   // this function can get called twice
-   if (_forkPipe[0] == -1) {
-     return;
-   }
-
-   // debug("%d: after fork child", getpid());
-   internal::Heap().unlock();
-   runtime().unlock();
-   runtime().heap().unlock();
-
-   close(_forkPipe[0]);
-
-   char *oldSpanDir = _spanDir;
-
-   // open new file for the arena
-   int newFd = openSpanFile(kArenaSize);
-
-   struct stat fileinfo;
-   memset(&fileinfo, 0, sizeof(fileinfo));
-   fstat(newFd, &fileinfo);
-   d_assert(fileinfo.st_size >= 0 && (size_t)fileinfo.st_size == kArenaSize);
-
-   const int oldFd = _fd;
-
-   const auto bitmap = allocatedBitmap();
-   for (auto const &i : bitmap) {
-     int result = internal::copyFile(newFd, oldFd, i * kPageSize, kPageSize);
-     d_assert(result == CPUInfo::PageSize);
-   }
-
-   int r = mprotect(_arenaBegin, kArenaSize, PROT_READ | PROT_WRITE);
-   hard_assert(r == 0);
-
-   // remap the new region over the old
-   void *ptr = mmap(_arenaBegin, kArenaSize, HL_MMAP_PROTECTION_MASK, kMapShared | MAP_FIXED, newFd, 0);
-   hard_assert_msg(ptr != MAP_FAILED, "map failed: %d", errno);
-
-   // re-do the meshed mappings
-   {
-     internal::unordered_set<MiniHeap *> seenMiniheaps{};
-
-     for (auto const &i : _meshedBitmap) {
-       MiniHeap *mh = reinterpret_cast<MiniHeap *>(miniheapForArenaOffset(i));
-       if (seenMiniheaps.find(mh) != seenMiniheaps.end()) {
-         continue;
-       }
-       seenMiniheaps.insert(mh);
-
-       const auto meshCount = mh->meshCount();
-       d_assert(meshCount > 1);
-
-       const auto sz = mh->spanSize();
-       const auto keep = reinterpret_cast<void *>(mh->getSpanStart(arenaBegin()));
-       const auto keepOff = offsetFor(keep);
-
-       const auto base = mh;
-       base->forEachMeshed([&](const MiniHeap *mh) {
-         if (!mh->isMeshed())
-           return false;
-
-         const auto remove = reinterpret_cast<void *>(mh->getSpanStart(arenaBegin()));
-         const auto removeOff = offsetFor(remove);
-
- #ifndef NDEBUG
-         const Length pageCount = sz / kPageSize;
-         for (size_t i = 0; i < pageCount; i++) {
-           d_assert(_mhIndex[removeOff + i].load().value() == _mhIndex[keepOff].load().value());
-         }
- #endif
-
-         void *ptr = mmap(remove, sz, HL_MMAP_PROTECTION_MASK, kMapShared | MAP_FIXED, newFd, keepOff * kPageSize);
-
-         hard_assert_msg(ptr != MAP_FAILED, "mesh remap failed: %d", errno);
-
-         return false;
-       });
-     }
-   }
-
-   _fd = newFd;
-
-   internal::Heap().free(oldSpanDir);
-
-   close(oldFd);
-
-   while (write(_forkPipe[1], "ok", strlen("ok")) == EAGAIN) {
-   }
-   close(_forkPipe[1]);
-
-   _forkPipe[0] = -1;
-   _forkPipe[1] = -1;
- }
- } // namespace mesh
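
For context: finalizeMesh() above is where two virtual spans end up backed by the same physical pages. The arena is a shared, file-backed mapping (a memfd on Linux, an unlinked shm file otherwise), so remapping the "remove" span onto the "keep" span's file offset with mmap(MAP_SHARED | MAP_FIXED) makes both address ranges alias a single copy of the data; hole-punching (freePhys() above) then lets freed pages go back to the OS. A minimal standalone sketch of that remapping trick — illustrative only, not from the package, and assuming Linux with glibc's memfd_create() wrapper (glibc >= 2.27) rather than the raw syscall used above:

    // mesh_demo.cc -- hypothetical illustration; build with: g++ mesh_demo.cc
    #include <cassert>
    #include <cstring>
    #include <sys/mman.h>
    #include <unistd.h>

    int main() {
      const long page = sysconf(_SC_PAGESIZE);
      int fd = memfd_create("mesh_demo", MFD_CLOEXEC);
      assert(fd >= 0);
      assert(ftruncate(fd, page) == 0);

      // two independent virtual mappings of the same file page
      char *keep = static_cast<char *>(
          mmap(nullptr, page, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0));
      char *remove = static_cast<char *>(
          mmap(nullptr, page, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0));
      assert(keep != MAP_FAILED && remove != MAP_FAILED);

      // a write through one mapping is visible through the other:
      // one physical page, two virtual addresses
      strcpy(keep, "written through keep");
      assert(strcmp(remove, "written through keep") == 0);

      munmap(keep, page);
      munmap(remove, page);
      close(fd);
      return 0;
    }

The allocator does the same thing at span granularity inside one arena-sized mapping, using MAP_FIXED to overwrite the "remove" span in place; afterForkChild() above replays exactly these remappings against the child's fresh copy of the arena file.
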