mustang 0.0.1 → 0.1.0

Files changed (560)
  1. data/.rspec +1 -0
  2. data/Isolate +9 -0
  3. data/README.md +6 -12
  4. data/Rakefile +30 -4
  5. data/TODO.md +9 -0
  6. data/ext/v8/extconf.rb +56 -0
  7. data/ext/v8/v8.cpp +37 -0
  8. data/ext/v8/v8_array.cpp +161 -0
  9. data/ext/v8/v8_array.h +17 -0
  10. data/ext/v8/v8_base.cpp +147 -0
  11. data/ext/v8/v8_base.h +23 -0
  12. data/ext/v8/v8_cast.cpp +151 -0
  13. data/ext/v8/v8_cast.h +64 -0
  14. data/ext/v8/v8_context.cpp +174 -0
  15. data/ext/v8/v8_context.h +12 -0
  16. data/ext/v8/v8_date.cpp +61 -0
  17. data/ext/v8/v8_date.h +16 -0
  18. data/ext/v8/v8_errors.cpp +147 -0
  19. data/ext/v8/v8_errors.h +19 -0
  20. data/ext/v8/v8_external.cpp +66 -0
  21. data/ext/v8/v8_external.h +16 -0
  22. data/ext/v8/v8_function.cpp +182 -0
  23. data/ext/v8/v8_function.h +14 -0
  24. data/ext/v8/v8_integer.cpp +70 -0
  25. data/ext/v8/v8_integer.h +16 -0
  26. data/ext/v8/v8_macros.h +30 -0
  27. data/ext/v8/v8_main.cpp +53 -0
  28. data/ext/v8/v8_main.h +13 -0
  29. data/ext/v8/v8_number.cpp +62 -0
  30. data/ext/v8/v8_number.h +16 -0
  31. data/ext/v8/v8_object.cpp +172 -0
  32. data/ext/v8/v8_object.h +17 -0
  33. data/ext/v8/v8_ref.cpp +72 -0
  34. data/ext/v8/v8_ref.h +43 -0
  35. data/ext/v8/v8_regexp.cpp +148 -0
  36. data/ext/v8/v8_regexp.h +16 -0
  37. data/ext/v8/v8_string.cpp +78 -0
  38. data/ext/v8/v8_string.h +16 -0
  39. data/ext/v8/v8_value.cpp +370 -0
  40. data/ext/v8/v8_value.h +19 -0
  41. data/gemspec.yml +2 -1
  42. data/lib/core_ext/class.rb +14 -0
  43. data/lib/core_ext/object.rb +12 -0
  44. data/lib/core_ext/symbol.rb +23 -0
  45. data/lib/mustang.rb +44 -0
  46. data/lib/mustang/context.rb +69 -0
  47. data/lib/mustang/errors.rb +36 -0
  48. data/lib/support/delegated.rb +25 -0
  49. data/lib/v8/array.rb +21 -0
  50. data/lib/v8/context.rb +13 -0
  51. data/lib/v8/date.rb +20 -0
  52. data/lib/v8/error.rb +15 -0
  53. data/lib/v8/external.rb +16 -0
  54. data/lib/v8/function.rb +11 -0
  55. data/lib/v8/integer.rb +16 -0
  56. data/lib/v8/number.rb +16 -0
  57. data/lib/v8/object.rb +66 -0
  58. data/lib/v8/regexp.rb +23 -0
  59. data/lib/v8/string.rb +27 -0
  60. data/mustang.gemspec +3 -0
  61. data/spec/core_ext/class_spec.rb +19 -0
  62. data/spec/core_ext/object_spec.rb +19 -0
  63. data/spec/core_ext/symbol_spec.rb +27 -0
  64. data/spec/fixtures/test1.js +2 -0
  65. data/spec/fixtures/test2.js +2 -0
  66. data/spec/spec_helper.rb +20 -0
  67. data/spec/v8/array_spec.rb +88 -0
  68. data/spec/v8/cast_spec.rb +151 -0
  69. data/spec/v8/context_spec.rb +78 -0
  70. data/spec/v8/data_spec.rb +39 -0
  71. data/spec/v8/date_spec.rb +45 -0
  72. data/spec/v8/empty_spec.rb +27 -0
  73. data/spec/v8/errors_spec.rb +142 -0
  74. data/spec/v8/external_spec.rb +44 -0
  75. data/spec/v8/function_spec.rb +170 -0
  76. data/spec/v8/integer_spec.rb +41 -0
  77. data/spec/v8/main_spec.rb +18 -0
  78. data/spec/v8/null_spec.rb +27 -0
  79. data/spec/v8/number_spec.rb +40 -0
  80. data/spec/v8/object_spec.rb +79 -0
  81. data/spec/v8/primitive_spec.rb +9 -0
  82. data/spec/v8/regexp_spec.rb +65 -0
  83. data/spec/v8/string_spec.rb +48 -0
  84. data/spec/v8/undefined_spec.rb +27 -0
  85. data/spec/v8/value_spec.rb +215 -0
  86. data/vendor/v8/.gitignore +2 -0
  87. data/vendor/v8/AUTHORS +3 -1
  88. data/vendor/v8/ChangeLog +117 -0
  89. data/vendor/v8/SConstruct +334 -53
  90. data/vendor/v8/include/v8-debug.h +21 -11
  91. data/vendor/v8/include/v8-preparser.h +1 -1
  92. data/vendor/v8/include/v8-profiler.h +122 -43
  93. data/vendor/v8/include/v8-testing.h +5 -0
  94. data/vendor/v8/include/v8.h +171 -17
  95. data/vendor/v8/preparser/SConscript +38 -0
  96. data/vendor/v8/preparser/preparser-process.cc +77 -114
  97. data/vendor/v8/samples/shell.cc +232 -46
  98. data/vendor/v8/src/SConscript +29 -5
  99. data/vendor/v8/src/accessors.cc +70 -211
  100. data/vendor/v8/{test/cctest/test-mips.cc → src/allocation-inl.h} +15 -18
  101. data/vendor/v8/src/allocation.cc +0 -82
  102. data/vendor/v8/src/allocation.h +9 -42
  103. data/vendor/v8/src/api.cc +1645 -1156
  104. data/vendor/v8/src/api.h +76 -12
  105. data/vendor/v8/src/apiutils.h +0 -7
  106. data/vendor/v8/src/arguments.h +15 -4
  107. data/vendor/v8/src/arm/assembler-arm-inl.h +10 -9
  108. data/vendor/v8/src/arm/assembler-arm.cc +62 -23
  109. data/vendor/v8/src/arm/assembler-arm.h +76 -11
  110. data/vendor/v8/src/arm/builtins-arm.cc +39 -33
  111. data/vendor/v8/src/arm/code-stubs-arm.cc +1182 -402
  112. data/vendor/v8/src/arm/code-stubs-arm.h +20 -54
  113. data/vendor/v8/src/arm/codegen-arm.cc +159 -106
  114. data/vendor/v8/src/arm/codegen-arm.h +6 -6
  115. data/vendor/v8/src/arm/constants-arm.h +16 -1
  116. data/vendor/v8/src/arm/cpu-arm.cc +7 -5
  117. data/vendor/v8/src/arm/debug-arm.cc +6 -4
  118. data/vendor/v8/src/arm/deoptimizer-arm.cc +51 -14
  119. data/vendor/v8/src/arm/disasm-arm.cc +47 -15
  120. data/vendor/v8/src/arm/frames-arm.h +1 -1
  121. data/vendor/v8/src/arm/full-codegen-arm.cc +724 -408
  122. data/vendor/v8/src/arm/ic-arm.cc +90 -85
  123. data/vendor/v8/src/arm/lithium-arm.cc +140 -69
  124. data/vendor/v8/src/arm/lithium-arm.h +161 -46
  125. data/vendor/v8/src/arm/lithium-codegen-arm.cc +567 -297
  126. data/vendor/v8/src/arm/lithium-codegen-arm.h +21 -9
  127. data/vendor/v8/src/arm/lithium-gap-resolver-arm.cc +2 -0
  128. data/vendor/v8/src/arm/macro-assembler-arm.cc +457 -96
  129. data/vendor/v8/src/arm/macro-assembler-arm.h +115 -18
  130. data/vendor/v8/src/arm/regexp-macro-assembler-arm.cc +20 -13
  131. data/vendor/v8/src/arm/regexp-macro-assembler-arm.h +1 -0
  132. data/vendor/v8/src/arm/simulator-arm.cc +184 -101
  133. data/vendor/v8/src/arm/simulator-arm.h +26 -21
  134. data/vendor/v8/src/arm/stub-cache-arm.cc +450 -467
  135. data/vendor/v8/src/arm/virtual-frame-arm.cc +14 -12
  136. data/vendor/v8/src/arm/virtual-frame-arm.h +11 -8
  137. data/vendor/v8/src/array.js +35 -18
  138. data/vendor/v8/src/assembler.cc +186 -92
  139. data/vendor/v8/src/assembler.h +106 -69
  140. data/vendor/v8/src/ast-inl.h +5 -0
  141. data/vendor/v8/src/ast.cc +46 -35
  142. data/vendor/v8/src/ast.h +107 -50
  143. data/vendor/v8/src/atomicops.h +2 -0
  144. data/vendor/v8/src/atomicops_internals_mips_gcc.h +169 -0
  145. data/vendor/v8/src/bootstrapper.cc +649 -399
  146. data/vendor/v8/src/bootstrapper.h +94 -27
  147. data/vendor/v8/src/builtins.cc +359 -227
  148. data/vendor/v8/src/builtins.h +157 -123
  149. data/vendor/v8/src/checks.cc +2 -2
  150. data/vendor/v8/src/checks.h +4 -0
  151. data/vendor/v8/src/code-stubs.cc +27 -17
  152. data/vendor/v8/src/code-stubs.h +38 -17
  153. data/vendor/v8/src/codegen-inl.h +5 -1
  154. data/vendor/v8/src/codegen.cc +27 -17
  155. data/vendor/v8/src/codegen.h +9 -9
  156. data/vendor/v8/src/compilation-cache.cc +92 -206
  157. data/vendor/v8/src/compilation-cache.h +205 -30
  158. data/vendor/v8/src/compiler.cc +107 -120
  159. data/vendor/v8/src/compiler.h +17 -2
  160. data/vendor/v8/src/contexts.cc +22 -15
  161. data/vendor/v8/src/contexts.h +14 -8
  162. data/vendor/v8/src/conversions.cc +86 -30
  163. data/vendor/v8/src/counters.cc +19 -4
  164. data/vendor/v8/src/counters.h +28 -16
  165. data/vendor/v8/src/cpu-profiler-inl.h +4 -3
  166. data/vendor/v8/src/cpu-profiler.cc +123 -72
  167. data/vendor/v8/src/cpu-profiler.h +33 -19
  168. data/vendor/v8/src/cpu.h +2 -0
  169. data/vendor/v8/src/d8-debug.cc +3 -3
  170. data/vendor/v8/src/d8-debug.h +7 -6
  171. data/vendor/v8/src/d8-posix.cc +2 -0
  172. data/vendor/v8/src/d8.cc +22 -12
  173. data/vendor/v8/src/d8.gyp +3 -0
  174. data/vendor/v8/src/d8.js +618 -0
  175. data/vendor/v8/src/data-flow.h +3 -3
  176. data/vendor/v8/src/dateparser.h +4 -2
  177. data/vendor/v8/src/debug-agent.cc +10 -9
  178. data/vendor/v8/src/debug-agent.h +9 -11
  179. data/vendor/v8/src/debug-debugger.js +121 -0
  180. data/vendor/v8/src/debug.cc +331 -227
  181. data/vendor/v8/src/debug.h +248 -219
  182. data/vendor/v8/src/deoptimizer.cc +173 -62
  183. data/vendor/v8/src/deoptimizer.h +119 -19
  184. data/vendor/v8/src/disasm.h +3 -0
  185. data/vendor/v8/src/disassembler.cc +10 -9
  186. data/vendor/v8/src/execution.cc +185 -129
  187. data/vendor/v8/src/execution.h +47 -78
  188. data/vendor/v8/src/extensions/experimental/break-iterator.cc +250 -0
  189. data/vendor/v8/src/extensions/experimental/break-iterator.h +89 -0
  190. data/vendor/v8/src/extensions/experimental/experimental.gyp +2 -0
  191. data/vendor/v8/src/extensions/experimental/i18n-extension.cc +22 -2
  192. data/vendor/v8/src/extensions/externalize-string-extension.cc +2 -2
  193. data/vendor/v8/src/extensions/gc-extension.cc +1 -1
  194. data/vendor/v8/src/factory.cc +261 -154
  195. data/vendor/v8/src/factory.h +162 -158
  196. data/vendor/v8/src/flag-definitions.h +17 -11
  197. data/vendor/v8/src/frame-element.cc +0 -5
  198. data/vendor/v8/src/frame-element.h +9 -13
  199. data/vendor/v8/src/frames-inl.h +7 -0
  200. data/vendor/v8/src/frames.cc +56 -46
  201. data/vendor/v8/src/frames.h +36 -25
  202. data/vendor/v8/src/full-codegen.cc +15 -24
  203. data/vendor/v8/src/full-codegen.h +13 -41
  204. data/vendor/v8/src/func-name-inferrer.cc +7 -6
  205. data/vendor/v8/src/func-name-inferrer.h +1 -1
  206. data/vendor/v8/src/gdb-jit.cc +1 -0
  207. data/vendor/v8/src/global-handles.cc +118 -56
  208. data/vendor/v8/src/global-handles.h +98 -40
  209. data/vendor/v8/src/globals.h +2 -2
  210. data/vendor/v8/src/handles-inl.h +106 -9
  211. data/vendor/v8/src/handles.cc +220 -157
  212. data/vendor/v8/src/handles.h +38 -59
  213. data/vendor/v8/src/hashmap.h +3 -3
  214. data/vendor/v8/src/heap-inl.h +141 -25
  215. data/vendor/v8/src/heap-profiler.cc +117 -63
  216. data/vendor/v8/src/heap-profiler.h +38 -21
  217. data/vendor/v8/src/heap.cc +805 -564
  218. data/vendor/v8/src/heap.h +640 -594
  219. data/vendor/v8/src/hydrogen-instructions.cc +216 -73
  220. data/vendor/v8/src/hydrogen-instructions.h +259 -124
  221. data/vendor/v8/src/hydrogen.cc +996 -1171
  222. data/vendor/v8/src/hydrogen.h +163 -144
  223. data/vendor/v8/src/ia32/assembler-ia32-inl.h +12 -11
  224. data/vendor/v8/src/ia32/assembler-ia32.cc +85 -39
  225. data/vendor/v8/src/ia32/assembler-ia32.h +82 -16
  226. data/vendor/v8/src/ia32/builtins-ia32.cc +64 -58
  227. data/vendor/v8/src/ia32/code-stubs-ia32.cc +248 -324
  228. data/vendor/v8/src/ia32/code-stubs-ia32.h +3 -44
  229. data/vendor/v8/src/ia32/codegen-ia32.cc +217 -165
  230. data/vendor/v8/src/ia32/codegen-ia32.h +3 -0
  231. data/vendor/v8/src/ia32/cpu-ia32.cc +6 -5
  232. data/vendor/v8/src/ia32/debug-ia32.cc +8 -5
  233. data/vendor/v8/src/ia32/deoptimizer-ia32.cc +124 -14
  234. data/vendor/v8/src/ia32/disasm-ia32.cc +85 -62
  235. data/vendor/v8/src/ia32/frames-ia32.h +1 -1
  236. data/vendor/v8/src/ia32/full-codegen-ia32.cc +348 -435
  237. data/vendor/v8/src/ia32/ic-ia32.cc +91 -91
  238. data/vendor/v8/src/ia32/lithium-codegen-ia32.cc +500 -255
  239. data/vendor/v8/src/ia32/lithium-codegen-ia32.h +13 -4
  240. data/vendor/v8/src/ia32/lithium-gap-resolver-ia32.cc +6 -0
  241. data/vendor/v8/src/ia32/lithium-ia32.cc +122 -45
  242. data/vendor/v8/src/ia32/lithium-ia32.h +128 -41
  243. data/vendor/v8/src/ia32/macro-assembler-ia32.cc +109 -84
  244. data/vendor/v8/src/ia32/macro-assembler-ia32.h +18 -9
  245. data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.cc +26 -15
  246. data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.h +1 -0
  247. data/vendor/v8/src/ia32/register-allocator-ia32.cc +30 -30
  248. data/vendor/v8/src/ia32/simulator-ia32.h +4 -4
  249. data/vendor/v8/src/ia32/stub-cache-ia32.cc +383 -400
  250. data/vendor/v8/src/ia32/virtual-frame-ia32.cc +36 -13
  251. data/vendor/v8/src/ia32/virtual-frame-ia32.h +11 -5
  252. data/vendor/v8/src/ic-inl.h +12 -2
  253. data/vendor/v8/src/ic.cc +304 -221
  254. data/vendor/v8/src/ic.h +115 -58
  255. data/vendor/v8/src/interpreter-irregexp.cc +25 -21
  256. data/vendor/v8/src/interpreter-irregexp.h +2 -1
  257. data/vendor/v8/src/isolate.cc +883 -0
  258. data/vendor/v8/src/isolate.h +1304 -0
  259. data/vendor/v8/src/json.js +10 -10
  260. data/vendor/v8/src/jsregexp.cc +111 -80
  261. data/vendor/v8/src/jsregexp.h +6 -7
  262. data/vendor/v8/src/jump-target-heavy.cc +5 -8
  263. data/vendor/v8/src/jump-target-heavy.h +0 -6
  264. data/vendor/v8/src/jump-target-inl.h +1 -1
  265. data/vendor/v8/src/jump-target-light.cc +3 -3
  266. data/vendor/v8/src/lithium-allocator-inl.h +2 -0
  267. data/vendor/v8/src/lithium-allocator.cc +42 -30
  268. data/vendor/v8/src/lithium-allocator.h +8 -22
  269. data/vendor/v8/src/lithium.cc +1 -0
  270. data/vendor/v8/src/liveedit.cc +141 -99
  271. data/vendor/v8/src/liveedit.h +7 -2
  272. data/vendor/v8/src/liveobjectlist-inl.h +90 -0
  273. data/vendor/v8/src/liveobjectlist.cc +2537 -1
  274. data/vendor/v8/src/liveobjectlist.h +245 -35
  275. data/vendor/v8/src/log-utils.cc +122 -35
  276. data/vendor/v8/src/log-utils.h +33 -36
  277. data/vendor/v8/src/log.cc +299 -241
  278. data/vendor/v8/src/log.h +177 -110
  279. data/vendor/v8/src/mark-compact.cc +612 -470
  280. data/vendor/v8/src/mark-compact.h +153 -80
  281. data/vendor/v8/src/messages.cc +16 -14
  282. data/vendor/v8/src/messages.js +30 -7
  283. data/vendor/v8/src/mips/assembler-mips-inl.h +155 -35
  284. data/vendor/v8/src/mips/assembler-mips.cc +1093 -219
  285. data/vendor/v8/src/mips/assembler-mips.h +552 -153
  286. data/vendor/v8/src/mips/builtins-mips.cc +43 -100
  287. data/vendor/v8/src/mips/code-stubs-mips.cc +752 -0
  288. data/vendor/v8/src/mips/code-stubs-mips.h +511 -0
  289. data/vendor/v8/src/mips/codegen-mips-inl.h +8 -14
  290. data/vendor/v8/src/mips/codegen-mips.cc +672 -896
  291. data/vendor/v8/src/mips/codegen-mips.h +271 -69
  292. data/vendor/v8/src/mips/constants-mips.cc +44 -20
  293. data/vendor/v8/src/mips/constants-mips.h +238 -40
  294. data/vendor/v8/src/mips/cpu-mips.cc +20 -3
  295. data/vendor/v8/src/mips/debug-mips.cc +35 -7
  296. data/vendor/v8/src/mips/deoptimizer-mips.cc +91 -0
  297. data/vendor/v8/src/mips/disasm-mips.cc +329 -93
  298. data/vendor/v8/src/mips/frames-mips.cc +2 -50
  299. data/vendor/v8/src/mips/frames-mips.h +24 -9
  300. data/vendor/v8/src/mips/full-codegen-mips.cc +473 -23
  301. data/vendor/v8/src/mips/ic-mips.cc +81 -45
  302. data/vendor/v8/src/mips/jump-target-mips.cc +11 -106
  303. data/vendor/v8/src/mips/lithium-codegen-mips.h +65 -0
  304. data/vendor/v8/src/mips/lithium-mips.h +304 -0
  305. data/vendor/v8/src/mips/macro-assembler-mips.cc +2391 -390
  306. data/vendor/v8/src/mips/macro-assembler-mips.h +718 -121
  307. data/vendor/v8/src/mips/regexp-macro-assembler-mips.cc +478 -0
  308. data/vendor/v8/src/mips/regexp-macro-assembler-mips.h +250 -0
  309. data/vendor/v8/src/mips/register-allocator-mips-inl.h +0 -3
  310. data/vendor/v8/src/mips/register-allocator-mips.h +3 -2
  311. data/vendor/v8/src/mips/simulator-mips.cc +1009 -221
  312. data/vendor/v8/src/mips/simulator-mips.h +119 -36
  313. data/vendor/v8/src/mips/stub-cache-mips.cc +331 -148
  314. data/vendor/v8/src/mips/{fast-codegen-mips.cc → virtual-frame-mips-inl.h} +11 -30
  315. data/vendor/v8/src/mips/virtual-frame-mips.cc +137 -149
  316. data/vendor/v8/src/mips/virtual-frame-mips.h +294 -312
  317. data/vendor/v8/src/mirror-debugger.js +9 -8
  318. data/vendor/v8/src/mksnapshot.cc +2 -2
  319. data/vendor/v8/src/objects-debug.cc +16 -16
  320. data/vendor/v8/src/objects-inl.h +421 -195
  321. data/vendor/v8/src/objects-printer.cc +7 -7
  322. data/vendor/v8/src/objects-visiting.cc +1 -1
  323. data/vendor/v8/src/objects-visiting.h +33 -12
  324. data/vendor/v8/src/objects.cc +935 -658
  325. data/vendor/v8/src/objects.h +234 -139
  326. data/vendor/v8/src/parser.cc +484 -439
  327. data/vendor/v8/src/parser.h +35 -14
  328. data/vendor/v8/src/platform-cygwin.cc +173 -107
  329. data/vendor/v8/src/platform-freebsd.cc +224 -72
  330. data/vendor/v8/src/platform-linux.cc +234 -95
  331. data/vendor/v8/src/platform-macos.cc +215 -82
  332. data/vendor/v8/src/platform-nullos.cc +9 -3
  333. data/vendor/v8/src/platform-openbsd.cc +22 -7
  334. data/vendor/v8/src/platform-posix.cc +30 -5
  335. data/vendor/v8/src/platform-solaris.cc +120 -38
  336. data/vendor/v8/src/platform-tls-mac.h +62 -0
  337. data/vendor/v8/src/platform-tls-win32.h +62 -0
  338. data/vendor/v8/src/platform-tls.h +50 -0
  339. data/vendor/v8/src/platform-win32.cc +195 -97
  340. data/vendor/v8/src/platform.h +72 -15
  341. data/vendor/v8/src/preparse-data.cc +2 -0
  342. data/vendor/v8/src/preparser-api.cc +8 -2
  343. data/vendor/v8/src/preparser.cc +1 -1
  344. data/vendor/v8/src/prettyprinter.cc +43 -52
  345. data/vendor/v8/src/prettyprinter.h +1 -1
  346. data/vendor/v8/src/profile-generator-inl.h +0 -28
  347. data/vendor/v8/src/profile-generator.cc +942 -685
  348. data/vendor/v8/src/profile-generator.h +210 -176
  349. data/vendor/v8/src/property.cc +6 -0
  350. data/vendor/v8/src/property.h +14 -3
  351. data/vendor/v8/src/regexp-macro-assembler-irregexp.cc +1 -1
  352. data/vendor/v8/src/regexp-macro-assembler.cc +28 -19
  353. data/vendor/v8/src/regexp-macro-assembler.h +11 -6
  354. data/vendor/v8/src/regexp-stack.cc +18 -10
  355. data/vendor/v8/src/regexp-stack.h +45 -21
  356. data/vendor/v8/src/regexp.js +3 -3
  357. data/vendor/v8/src/register-allocator-inl.h +3 -3
  358. data/vendor/v8/src/register-allocator.cc +1 -7
  359. data/vendor/v8/src/register-allocator.h +5 -15
  360. data/vendor/v8/src/rewriter.cc +2 -1
  361. data/vendor/v8/src/runtime-profiler.cc +158 -128
  362. data/vendor/v8/src/runtime-profiler.h +131 -15
  363. data/vendor/v8/src/runtime.cc +2409 -1692
  364. data/vendor/v8/src/runtime.h +93 -17
  365. data/vendor/v8/src/safepoint-table.cc +3 -0
  366. data/vendor/v8/src/safepoint-table.h +9 -3
  367. data/vendor/v8/src/scanner-base.cc +21 -28
  368. data/vendor/v8/src/scanner-base.h +22 -11
  369. data/vendor/v8/src/scanner.cc +3 -5
  370. data/vendor/v8/src/scanner.h +4 -2
  371. data/vendor/v8/src/scopeinfo.cc +11 -16
  372. data/vendor/v8/src/scopeinfo.h +26 -15
  373. data/vendor/v8/src/scopes.cc +67 -37
  374. data/vendor/v8/src/scopes.h +26 -12
  375. data/vendor/v8/src/serialize.cc +193 -154
  376. data/vendor/v8/src/serialize.h +41 -36
  377. data/vendor/v8/src/small-pointer-list.h +163 -0
  378. data/vendor/v8/src/snapshot-common.cc +1 -1
  379. data/vendor/v8/src/snapshot.h +3 -1
  380. data/vendor/v8/src/spaces-inl.h +30 -25
  381. data/vendor/v8/src/spaces.cc +263 -370
  382. data/vendor/v8/src/spaces.h +178 -166
  383. data/vendor/v8/src/string-search.cc +4 -3
  384. data/vendor/v8/src/string-search.h +21 -20
  385. data/vendor/v8/src/string-stream.cc +32 -24
  386. data/vendor/v8/src/string.js +7 -7
  387. data/vendor/v8/src/stub-cache.cc +324 -248
  388. data/vendor/v8/src/stub-cache.h +181 -155
  389. data/vendor/v8/src/token.cc +3 -3
  390. data/vendor/v8/src/token.h +3 -3
  391. data/vendor/v8/src/top.cc +218 -390
  392. data/vendor/v8/src/type-info.cc +98 -32
  393. data/vendor/v8/src/type-info.h +10 -3
  394. data/vendor/v8/src/unicode.cc +1 -1
  395. data/vendor/v8/src/unicode.h +1 -1
  396. data/vendor/v8/src/utils.h +3 -0
  397. data/vendor/v8/src/v8-counters.cc +18 -11
  398. data/vendor/v8/src/v8-counters.h +34 -13
  399. data/vendor/v8/src/v8.cc +66 -121
  400. data/vendor/v8/src/v8.h +7 -4
  401. data/vendor/v8/src/v8globals.h +18 -12
  402. data/vendor/v8/src/{memory.h → v8memory.h} +0 -0
  403. data/vendor/v8/src/v8natives.js +59 -18
  404. data/vendor/v8/src/v8threads.cc +127 -114
  405. data/vendor/v8/src/v8threads.h +42 -35
  406. data/vendor/v8/src/v8utils.h +2 -39
  407. data/vendor/v8/src/variables.h +1 -1
  408. data/vendor/v8/src/version.cc +26 -5
  409. data/vendor/v8/src/version.h +4 -0
  410. data/vendor/v8/src/virtual-frame-heavy-inl.h +2 -4
  411. data/vendor/v8/src/virtual-frame-light-inl.h +5 -4
  412. data/vendor/v8/src/vm-state-inl.h +21 -17
  413. data/vendor/v8/src/vm-state.h +7 -5
  414. data/vendor/v8/src/win32-headers.h +1 -0
  415. data/vendor/v8/src/x64/assembler-x64-inl.h +12 -11
  416. data/vendor/v8/src/x64/assembler-x64.cc +80 -40
  417. data/vendor/v8/src/x64/assembler-x64.h +67 -17
  418. data/vendor/v8/src/x64/builtins-x64.cc +34 -33
  419. data/vendor/v8/src/x64/code-stubs-x64.cc +636 -377
  420. data/vendor/v8/src/x64/code-stubs-x64.h +14 -48
  421. data/vendor/v8/src/x64/codegen-x64-inl.h +1 -1
  422. data/vendor/v8/src/x64/codegen-x64.cc +158 -136
  423. data/vendor/v8/src/x64/codegen-x64.h +4 -1
  424. data/vendor/v8/src/x64/cpu-x64.cc +7 -5
  425. data/vendor/v8/src/x64/debug-x64.cc +8 -6
  426. data/vendor/v8/src/x64/deoptimizer-x64.cc +195 -20
  427. data/vendor/v8/src/x64/disasm-x64.cc +42 -23
  428. data/vendor/v8/src/x64/frames-x64.cc +1 -1
  429. data/vendor/v8/src/x64/frames-x64.h +2 -2
  430. data/vendor/v8/src/x64/full-codegen-x64.cc +780 -218
  431. data/vendor/v8/src/x64/ic-x64.cc +77 -79
  432. data/vendor/v8/src/x64/jump-target-x64.cc +1 -1
  433. data/vendor/v8/src/x64/lithium-codegen-x64.cc +698 -181
  434. data/vendor/v8/src/x64/lithium-codegen-x64.h +31 -6
  435. data/vendor/v8/src/x64/lithium-x64.cc +136 -54
  436. data/vendor/v8/src/x64/lithium-x64.h +142 -51
  437. data/vendor/v8/src/x64/macro-assembler-x64.cc +456 -187
  438. data/vendor/v8/src/x64/macro-assembler-x64.h +166 -34
  439. data/vendor/v8/src/x64/regexp-macro-assembler-x64.cc +44 -28
  440. data/vendor/v8/src/x64/regexp-macro-assembler-x64.h +8 -4
  441. data/vendor/v8/src/x64/register-allocator-x64-inl.h +3 -3
  442. data/vendor/v8/src/x64/register-allocator-x64.cc +12 -8
  443. data/vendor/v8/src/x64/simulator-x64.h +5 -5
  444. data/vendor/v8/src/x64/stub-cache-x64.cc +299 -344
  445. data/vendor/v8/src/x64/virtual-frame-x64.cc +37 -13
  446. data/vendor/v8/src/x64/virtual-frame-x64.h +13 -7
  447. data/vendor/v8/src/zone-inl.h +49 -3
  448. data/vendor/v8/src/zone.cc +42 -41
  449. data/vendor/v8/src/zone.h +37 -34
  450. data/vendor/v8/test/benchmarks/testcfg.py +100 -0
  451. data/vendor/v8/test/cctest/SConscript +5 -4
  452. data/vendor/v8/test/cctest/cctest.h +3 -2
  453. data/vendor/v8/test/cctest/cctest.status +6 -11
  454. data/vendor/v8/test/cctest/test-accessors.cc +3 -3
  455. data/vendor/v8/test/cctest/test-alloc.cc +39 -33
  456. data/vendor/v8/test/cctest/test-api.cc +1092 -205
  457. data/vendor/v8/test/cctest/test-assembler-arm.cc +39 -25
  458. data/vendor/v8/test/cctest/test-assembler-ia32.cc +36 -37
  459. data/vendor/v8/test/cctest/test-assembler-mips.cc +1098 -40
  460. data/vendor/v8/test/cctest/test-assembler-x64.cc +32 -25
  461. data/vendor/v8/test/cctest/test-ast.cc +1 -0
  462. data/vendor/v8/test/cctest/test-circular-queue.cc +8 -5
  463. data/vendor/v8/test/cctest/test-compiler.cc +24 -24
  464. data/vendor/v8/test/cctest/test-cpu-profiler.cc +140 -5
  465. data/vendor/v8/test/cctest/test-dataflow.cc +1 -0
  466. data/vendor/v8/test/cctest/test-debug.cc +136 -77
  467. data/vendor/v8/test/cctest/test-decls.cc +1 -1
  468. data/vendor/v8/test/cctest/test-deoptimization.cc +25 -24
  469. data/vendor/v8/test/cctest/test-disasm-arm.cc +9 -4
  470. data/vendor/v8/test/cctest/test-disasm-ia32.cc +10 -8
  471. data/vendor/v8/test/cctest/test-func-name-inference.cc +10 -4
  472. data/vendor/v8/test/cctest/test-heap-profiler.cc +226 -164
  473. data/vendor/v8/test/cctest/test-heap.cc +240 -217
  474. data/vendor/v8/test/cctest/test-liveedit.cc +1 -0
  475. data/vendor/v8/test/cctest/test-log-stack-tracer.cc +18 -20
  476. data/vendor/v8/test/cctest/test-log.cc +114 -108
  477. data/vendor/v8/test/cctest/test-macro-assembler-x64.cc +247 -177
  478. data/vendor/v8/test/cctest/test-mark-compact.cc +129 -90
  479. data/vendor/v8/test/cctest/test-parsing.cc +15 -14
  480. data/vendor/v8/test/cctest/test-platform-linux.cc +1 -0
  481. data/vendor/v8/test/cctest/test-platform-tls.cc +66 -0
  482. data/vendor/v8/test/cctest/test-platform-win32.cc +1 -0
  483. data/vendor/v8/test/cctest/test-profile-generator.cc +1 -1
  484. data/vendor/v8/test/cctest/test-regexp.cc +53 -41
  485. data/vendor/v8/test/cctest/test-reloc-info.cc +18 -11
  486. data/vendor/v8/test/cctest/test-serialize.cc +44 -43
  487. data/vendor/v8/test/cctest/test-sockets.cc +8 -3
  488. data/vendor/v8/test/cctest/test-spaces.cc +47 -29
  489. data/vendor/v8/test/cctest/test-strings.cc +20 -20
  490. data/vendor/v8/test/cctest/test-thread-termination.cc +8 -3
  491. data/vendor/v8/test/cctest/test-threads.cc +5 -3
  492. data/vendor/v8/test/cctest/test-utils.cc +5 -4
  493. data/vendor/v8/test/cctest/testcfg.py +7 -3
  494. data/vendor/v8/test/es5conform/es5conform.status +2 -77
  495. data/vendor/v8/test/es5conform/testcfg.py +1 -1
  496. data/vendor/v8/test/message/testcfg.py +1 -1
  497. data/vendor/v8/test/mjsunit/accessors-on-global-object.js +3 -3
  498. data/vendor/v8/test/mjsunit/array-concat.js +43 -1
  499. data/vendor/v8/test/mjsunit/array-join.js +25 -0
  500. data/vendor/v8/test/mjsunit/bitops-info.js +7 -1
  501. data/vendor/v8/test/mjsunit/compiler/array-length.js +2 -2
  502. data/vendor/v8/test/mjsunit/compiler/global-accessors.js +47 -0
  503. data/vendor/v8/test/mjsunit/compiler/pic.js +1 -1
  504. data/vendor/v8/test/mjsunit/compiler/regress-loadfield.js +65 -0
  505. data/vendor/v8/test/mjsunit/math-sqrt.js +5 -1
  506. data/vendor/v8/test/mjsunit/mjsunit.js +59 -8
  507. data/vendor/v8/test/mjsunit/mjsunit.status +0 -12
  508. data/vendor/v8/test/mjsunit/mul-exhaustive.js +129 -11
  509. data/vendor/v8/test/mjsunit/negate-zero.js +1 -1
  510. data/vendor/v8/test/mjsunit/object-freeze.js +5 -13
  511. data/vendor/v8/test/mjsunit/object-prevent-extensions.js +9 -50
  512. data/vendor/v8/test/mjsunit/object-seal.js +4 -13
  513. data/vendor/v8/test/mjsunit/override-eval-with-non-function.js +36 -0
  514. data/vendor/v8/test/mjsunit/regress/regress-1145.js +54 -0
  515. data/vendor/v8/test/mjsunit/regress/regress-1172-bis.js +37 -0
  516. data/vendor/v8/test/mjsunit/regress/regress-1181.js +54 -0
  517. data/vendor/v8/test/mjsunit/regress/regress-1207.js +35 -0
  518. data/vendor/v8/test/mjsunit/regress/regress-1209.js +34 -0
  519. data/vendor/v8/test/mjsunit/regress/regress-1210.js +48 -0
  520. data/vendor/v8/test/mjsunit/regress/regress-1213.js +43 -0
  521. data/vendor/v8/test/mjsunit/regress/regress-1218.js +29 -0
  522. data/vendor/v8/test/mjsunit/regress/regress-1229.js +79 -0
  523. data/vendor/v8/test/mjsunit/regress/regress-1233.js +47 -0
  524. data/vendor/v8/test/mjsunit/regress/regress-1236.js +34 -0
  525. data/vendor/v8/test/mjsunit/regress/regress-1237.js +36 -0
  526. data/vendor/v8/test/mjsunit/regress/regress-1240.js +39 -0
  527. data/vendor/v8/test/mjsunit/regress/regress-1257.js +58 -0
  528. data/vendor/v8/test/mjsunit/regress/regress-1278.js +69 -0
  529. data/vendor/v8/test/mjsunit/regress/regress-create-exception.js +1 -0
  530. data/vendor/v8/test/mjsunit/regress/regress-lazy-deopt-reloc.js +52 -0
  531. data/vendor/v8/test/mjsunit/sin-cos.js +15 -10
  532. data/vendor/v8/test/mjsunit/smi-negative-zero.js +2 -2
  533. data/vendor/v8/test/mjsunit/str-to-num.js +1 -1
  534. data/vendor/v8/test/mjsunit/strict-mode.js +435 -0
  535. data/vendor/v8/test/mjsunit/testcfg.py +23 -6
  536. data/vendor/v8/test/mozilla/mozilla.status +0 -2
  537. data/vendor/v8/test/mozilla/testcfg.py +1 -1
  538. data/vendor/v8/test/preparser/empty.js +28 -0
  539. data/vendor/v8/test/preparser/functions-only.js +38 -0
  540. data/vendor/v8/test/preparser/non-alphanum.js +34 -0
  541. data/vendor/v8/test/preparser/symbols-only.js +49 -0
  542. data/vendor/v8/test/preparser/testcfg.py +90 -0
  543. data/vendor/v8/test/sputnik/testcfg.py +1 -1
  544. data/vendor/v8/test/test262/README +16 -0
  545. data/vendor/v8/test/test262/harness-adapt.js +80 -0
  546. data/vendor/v8/test/test262/test262.status +1506 -0
  547. data/vendor/v8/test/test262/testcfg.py +123 -0
  548. data/vendor/v8/tools/freebsd-tick-processor +10 -0
  549. data/vendor/v8/tools/gyp/v8.gyp +8 -33
  550. data/vendor/v8/tools/linux-tick-processor +5 -3
  551. data/vendor/v8/tools/test.py +37 -14
  552. data/vendor/v8/tools/tickprocessor.js +22 -8
  553. data/vendor/v8/tools/visual_studio/v8_base.vcproj +13 -1
  554. data/vendor/v8/tools/visual_studio/v8_base_arm.vcproj +5 -1
  555. data/vendor/v8/tools/visual_studio/v8_base_x64.vcproj +5 -1
  556. data/vendor/v8/tools/visual_studio/x64.vsprops +1 -0
  557. metadata +1495 -1341
  558. data/ext/extconf.rb +0 -22
  559. data/ext/mustang.cpp +0 -58
  560. data/vendor/v8/src/top.h +0 -608
@@ -42,8 +42,6 @@ namespace internal {
        && (info).top <= (space).high() \
        && (info).limit == (space).high())
 
-intptr_t Page::watermark_invalidated_mark_ = 1 << Page::WATERMARK_INVALIDATED;
-
 // ----------------------------------------------------------------------------
 // HeapObjectIterator
 
@@ -149,10 +147,14 @@ PageIterator::PageIterator(PagedSpace* space, Mode mode) : space_(space) {
 // -----------------------------------------------------------------------------
 // CodeRange
 
-List<CodeRange::FreeBlock> CodeRange::free_list_(0);
-List<CodeRange::FreeBlock> CodeRange::allocation_list_(0);
-int CodeRange::current_allocation_block_index_ = 0;
-VirtualMemory* CodeRange::code_range_ = NULL;
+
+CodeRange::CodeRange()
+    : code_range_(NULL),
+      free_list_(0),
+      allocation_list_(0),
+      current_allocation_block_index_(0),
+      isolate_(NULL) {
+}
 
 
 bool CodeRange::Setup(const size_t requested) {
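The hunk above is the recurring move of this whole V8 upgrade (the hunks in this excerpt appear to come from data/vendor/v8/src/spaces.cc, entry 381 in the file list): class-level statics that used to exist once per process become instance fields, initialized in a constructor and tied to their owner through an isolate_ back-pointer. A minimal compilable sketch of that pattern follows; the names are hypothetical, not V8's real classes.

#include <cstddef>

// Illustrative only. Before the isolates work, bookkeeping like this was
// process-global:
//
//   static VirtualMemory* code_range_;  // one copy per process
//
// Afterwards each Isolate owns its own copy, so two VMs embedded in one
// process no longer share (or race on) the same state.

class Isolate;  // forward declaration: the owner of all per-VM state

class CodeRangeLike {
 public:
  CodeRangeLike()
      : code_range_(NULL),
        current_allocation_block_index_(0),
        isolate_(NULL) {}

  // The owning isolate wires itself in once, right after construction.
  void set_isolate(Isolate* isolate) { isolate_ = isolate; }
  bool exists() const { return code_range_ != NULL; }

 private:
  void* code_range_;                    // was: static VirtualMemory*
  int current_allocation_block_index_;  // was: static int
  Isolate* isolate_;                    // back-pointer that replaces globals
};

class Isolate {
 public:
  Isolate() { code_range_.set_isolate(this); }
  CodeRangeLike* code_range() { return &code_range_; }

 private:
  CodeRangeLike code_range_;  // one instance per isolate, not per process
};

int main() {
  Isolate a, b;  // two VMs in one process
  return a.code_range() == b.code_range() ? 1 : 0;  // 0: state is separate
}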
@@ -168,7 +170,7 @@ bool CodeRange::Setup(const size_t requested) {
 
   // We are sure that we have mapped a block of requested addresses.
   ASSERT(code_range_->size() == requested);
-  LOG(NewEvent("CodeRange", code_range_->address(), requested));
+  LOG(isolate_, NewEvent("CodeRange", code_range_->address(), requested));
   allocation_list_.Add(FreeBlock(code_range_->address(), code_range_->size()));
   current_allocation_block_index_ = 0;
   return true;
@@ -271,24 +273,24 @@ void CodeRange::TearDown() {
 // -----------------------------------------------------------------------------
 // MemoryAllocator
 //
-intptr_t MemoryAllocator::capacity_ = 0;
-intptr_t MemoryAllocator::capacity_executable_ = 0;
-intptr_t MemoryAllocator::size_ = 0;
-intptr_t MemoryAllocator::size_executable_ = 0;
-
-List<MemoryAllocator::MemoryAllocationCallbackRegistration>
-    MemoryAllocator::memory_allocation_callbacks_;
-
-VirtualMemory* MemoryAllocator::initial_chunk_ = NULL;
 
 // 270 is an estimate based on the static default heap size of a pair of 256K
 // semispaces and a 64M old generation.
 const int kEstimatedNumberOfChunks = 270;
-List<MemoryAllocator::ChunkInfo> MemoryAllocator::chunks_(
-    kEstimatedNumberOfChunks);
-List<int> MemoryAllocator::free_chunk_ids_(kEstimatedNumberOfChunks);
-int MemoryAllocator::max_nof_chunks_ = 0;
-int MemoryAllocator::top_ = 0;
+
+
+MemoryAllocator::MemoryAllocator()
+    : capacity_(0),
+      capacity_executable_(0),
+      size_(0),
+      size_executable_(0),
+      initial_chunk_(NULL),
+      chunks_(kEstimatedNumberOfChunks),
+      free_chunk_ids_(kEstimatedNumberOfChunks),
+      max_nof_chunks_(0),
+      top_(0),
+      isolate_(NULL) {
+}
 
 
 void MemoryAllocator::Push(int free_chunk_id) {
@@ -334,11 +336,6 @@ bool MemoryAllocator::Setup(intptr_t capacity, intptr_t capacity_executable) {
 }
 
 
-bool MemoryAllocator::SafeIsInAPageChunk(Address addr) {
-  return InInitialChunk(addr) || InAllocatedChunks(addr);
-}
-
-
 void MemoryAllocator::TearDown() {
   for (int i = 0; i < max_nof_chunks_; i++) {
     if (chunks_[i].address() != NULL) DeleteChunk(i);
@@ -347,15 +344,11 @@ void MemoryAllocator::TearDown() {
   free_chunk_ids_.Clear();
 
   if (initial_chunk_ != NULL) {
-    LOG(DeleteEvent("InitialChunk", initial_chunk_->address()));
+    LOG(isolate_, DeleteEvent("InitialChunk", initial_chunk_->address()));
     delete initial_chunk_;
     initial_chunk_ = NULL;
   }
 
-  FreeChunkTables(&chunk_table_[0],
-                  kChunkTableTopLevelEntries,
-                  kChunkTableLevels);
-
   ASSERT(top_ == max_nof_chunks_);  // all chunks are free
   top_ = 0;
   capacity_ = 0;
@@ -365,22 +358,6 @@ void MemoryAllocator::TearDown() {
 }
 
 
-void MemoryAllocator::FreeChunkTables(uintptr_t* array, int len, int level) {
-  for (int i = 0; i < len; i++) {
-    if (array[i] != kUnusedChunkTableEntry) {
-      uintptr_t* subarray = reinterpret_cast<uintptr_t*>(array[i]);
-      if (level > 1) {
-        array[i] = kUnusedChunkTableEntry;
-        FreeChunkTables(subarray, 1 << kChunkTableBitsPerLevel, level - 1);
-      } else {
-        array[i] = kUnusedChunkTableEntry;
-      }
-      delete[] subarray;
-    }
-  }
-}
-
-
 void* MemoryAllocator::AllocateRawMemory(const size_t requested,
                                          size_t* allocated,
                                          Executability executable) {
@@ -393,14 +370,15 @@ void* MemoryAllocator::AllocateRawMemory(const size_t requested,
   // Check executable memory limit.
   if (size_executable_ + requested >
       static_cast<size_t>(capacity_executable_)) {
-    LOG(StringEvent("MemoryAllocator::AllocateRawMemory",
+    LOG(isolate_,
+        StringEvent("MemoryAllocator::AllocateRawMemory",
                     "V8 Executable Allocation capacity exceeded"));
     return NULL;
   }
   // Allocate executable memory either from code range or from the
   // OS.
-  if (CodeRange::exists()) {
-    mem = CodeRange::AllocateRawMemory(requested, allocated);
+  if (isolate_->code_range()->exists()) {
+    mem = isolate_->code_range()->AllocateRawMemory(requested, allocated);
   } else {
    mem = OS::Allocate(requested, allocated, true);
  }
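Two related call-site changes repeat through the hunks above and below: the LOG macro now names the isolate whose logger should record the event, and static CodeRange:: calls become isolate_->code_range()->... accessor chains. Below is a sketch of how a LOG-style macro can be retargeted from a process-global logger to an explicit per-isolate one; the macro and types are illustrative, not V8's exact definitions.

#include <cstddef>
#include <cstdio>

// Sketch: routing a LOG-style macro through an explicit isolate argument
// instead of a process-global logger. Names are hypothetical.

struct Logger {
  bool enabled;
  void NewEvent(const char* name, void* addr, size_t size) {
    if (enabled) std::printf("new,%s,%p,%zu\n", name, addr, size);
  }
};

struct Isolate {
  Logger logger;
  Logger* GetLogger() { return &logger; }
};

// Before: #define LOG(Call) global_logger.Call
// After: the call site says which isolate's logger should fire.
#define LOG(isolate, Call)        \
  do {                            \
    (isolate)->GetLogger()->Call; \
  } while (false)

int main() {
  Isolate quiet;  quiet.logger.enabled = false;
  Isolate chatty; chatty.logger.enabled = true;
  int x = 0;
  LOG(&quiet,  NewEvent("CodeRange", &x, 64));  // no output
  LOG(&chatty, NewEvent("CodeRange", &x, 64));  // logs once
  return 0;
}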
@@ -415,7 +393,7 @@ void* MemoryAllocator::AllocateRawMemory(const size_t requested,
 #ifdef DEBUG
   ZapBlock(reinterpret_cast<Address>(mem), alloced);
 #endif
-  Counters::memory_allocated.Increment(alloced);
+  isolate_->counters()->memory_allocated()->Increment(alloced);
   return mem;
 }
 
@@ -426,12 +404,12 @@ void MemoryAllocator::FreeRawMemory(void* mem,
 #ifdef DEBUG
   ZapBlock(reinterpret_cast<Address>(mem), length);
 #endif
-  if (CodeRange::contains(static_cast<Address>(mem))) {
-    CodeRange::FreeRawMemory(mem, length);
+  if (isolate_->code_range()->contains(static_cast<Address>(mem))) {
+    isolate_->code_range()->FreeRawMemory(mem, length);
   } else {
     OS::Free(mem, length);
   }
-  Counters::memory_allocated.Decrement(static_cast<int>(length));
+  isolate_->counters()->memory_allocated()->Decrement(static_cast<int>(length));
   size_ -= static_cast<int>(length);
   if (executable == EXECUTABLE) size_executable_ -= static_cast<int>(length);
 
@@ -498,7 +476,8 @@ void* MemoryAllocator::ReserveInitialChunk(const size_t requested) {
 
   // We are sure that we have mapped a block of requested addresses.
   ASSERT(initial_chunk_->size() == requested);
-  LOG(NewEvent("InitialChunk", initial_chunk_->address(), requested));
+  LOG(isolate_,
+      NewEvent("InitialChunk", initial_chunk_->address(), requested));
   size_ += static_cast<int>(requested);
   return initial_chunk_->address();
 }
@@ -522,14 +501,14 @@ Page* MemoryAllocator::AllocatePages(int requested_pages,
 
   void* chunk = AllocateRawMemory(chunk_size, &chunk_size, owner->executable());
   if (chunk == NULL) return Page::FromAddress(NULL);
-  LOG(NewEvent("PagedChunk", chunk, chunk_size));
+  LOG(isolate_, NewEvent("PagedChunk", chunk, chunk_size));
 
   *allocated_pages = PagesInChunk(static_cast<Address>(chunk), chunk_size);
   // We may 'lose' a page due to alignment.
   ASSERT(*allocated_pages >= kPagesPerChunk - 1);
   if (*allocated_pages == 0) {
     FreeRawMemory(chunk, chunk_size, owner->executable());
-    LOG(DeleteEvent("PagedChunk", chunk));
+    LOG(isolate_, DeleteEvent("PagedChunk", chunk));
     return Page::FromAddress(NULL);
   }
 
@@ -540,8 +519,6 @@ Page* MemoryAllocator::AllocatePages(int requested_pages,
   PerformAllocationCallback(space, kAllocationActionAllocate, chunk_size);
   Page* new_pages = InitializePagesInChunk(chunk_id, *allocated_pages, owner);
 
-  AddToAllocatedChunks(static_cast<Address>(chunk), chunk_size);
-
   return new_pages;
 }
 
@@ -560,7 +537,7 @@ Page* MemoryAllocator::CommitPages(Address start, size_t size,
 #ifdef DEBUG
   ZapBlock(start, size);
 #endif
-  Counters::memory_allocated.Increment(static_cast<int>(size));
+  isolate_->counters()->memory_allocated()->Increment(static_cast<int>(size));
 
   // So long as we correctly overestimated the number of chunks we should not
   // run out of chunk ids.
@@ -584,7 +561,7 @@ bool MemoryAllocator::CommitBlock(Address start,
 #ifdef DEBUG
   ZapBlock(start, size);
 #endif
-  Counters::memory_allocated.Increment(static_cast<int>(size));
+  isolate_->counters()->memory_allocated()->Increment(static_cast<int>(size));
   return true;
 }
 
@@ -597,7 +574,7 @@ bool MemoryAllocator::UncommitBlock(Address start, size_t size) {
   ASSERT(InInitialChunk(start + size - 1));
 
   if (!initial_chunk_->Uncommit(start, size)) return false;
-  Counters::memory_allocated.Decrement(static_cast<int>(size));
+  isolate_->counters()->memory_allocated()->Decrement(static_cast<int>(size));
   return true;
 }
 
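The counter updates above all follow the same scheme: the static Counters::memory_allocated becomes isolate_->counters()->memory_allocated()->..., so each isolate keeps its own statistics. A compilable sketch of that accessor chain, with simplified, hypothetical types:

struct StatsCounter {
  int value;
  void Increment(int by) { value += by; }
  void Decrement(int by) { value -= by; }
};

class Counters {
 public:
  Counters() : memory_allocated_() {}  // value-initialized to zero
  StatsCounter* memory_allocated() { return &memory_allocated_; }

 private:
  StatsCounter memory_allocated_;  // was: static Counters::memory_allocated
};

class Isolate {
 public:
  Counters* counters() { return &counters_; }

 private:
  Counters counters_;  // one counter table per isolate
};

int main() {
  Isolate isolate;
  // Call sites change from Counters::memory_allocated.Increment(n)
  // to the instance-based form used throughout the diff:
  isolate.counters()->memory_allocated()->Increment(4096);
  isolate.counters()->memory_allocated()->Decrement(1024);
  return isolate.counters()->memory_allocated()->value == 3072 ? 0 : 1;
}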
@@ -628,6 +605,7 @@ Page* MemoryAllocator::InitializePagesInChunk(int chunk_id, int pages_in_chunk,
   Address page_addr = low;
   for (int i = 0; i < pages_in_chunk; i++) {
     Page* p = Page::FromAddress(page_addr);
+    p->heap_ = owner->heap();
     p->opaque_header = OffsetFrom(page_addr + Page::kPageSize) | chunk_id;
     p->InvalidateWatermark(true);
     p->SetIsLargeObjectPage(false);
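The one added line in this hunk stamps every page with its owning heap. That back-pointer is what makes the heap()->isolate()->memory_allocator() chains in later hunks possible without any global lookup. A sketch of the ownership chain, again with hypothetical, simplified types:

class Isolate;

class Heap {
 public:
  explicit Heap(Isolate* isolate) : isolate_(isolate) {}
  Isolate* isolate() { return isolate_; }

 private:
  Isolate* isolate_;  // every heap knows its isolate
};

class Isolate {
 public:
  Isolate() : heap_(this) {}
  Heap* heap() { return &heap_; }

 private:
  Heap heap_;  // every isolate owns exactly one heap
};

struct Page {
  Heap* heap_;  // stamped once when the page is initialized
};

int main() {
  Isolate isolate;
  Page page;
  page.heap_ = isolate.heap();  // as in: p->heap_ = owner->heap();
  // From any page we can now walk back to the owning isolate.
  return page.heap_->isolate() == &isolate ? 0 : 1;
}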
@@ -697,11 +675,11 @@ void MemoryAllocator::DeleteChunk(int chunk_id) {
     // TODO(1240712): VirtualMemory::Uncommit has a return value which
     // is ignored here.
     initial_chunk_->Uncommit(c.address(), c.size());
-    Counters::memory_allocated.Decrement(static_cast<int>(c.size()));
+    Counters* counters = isolate_->counters();
+    counters->memory_allocated()->Decrement(static_cast<int>(c.size()));
   } else {
-    RemoveFromAllocatedChunks(c.address(), c.size());
-    LOG(DeleteEvent("PagedChunk", c.address()));
-    ObjectSpace space = static_cast<ObjectSpace>(1 << c.owner()->identity());
+    LOG(isolate_, DeleteEvent("PagedChunk", c.address()));
+    ObjectSpace space = static_cast<ObjectSpace>(1 << c.owner_identity());
     size_t size = c.size();
     FreeRawMemory(c.address(), size, c.executable());
     PerformAllocationCallback(space, kAllocationActionFree, size);
@@ -813,131 +791,14 @@ Page* MemoryAllocator::RelinkPagesInChunk(int chunk_id,
 }
 
 
-void MemoryAllocator::AddToAllocatedChunks(Address addr, intptr_t size) {
-  ASSERT(size == kChunkSize);
-  uintptr_t int_address = reinterpret_cast<uintptr_t>(addr);
-  AddChunkUsingAddress(int_address, int_address);
-  AddChunkUsingAddress(int_address, int_address + size - 1);
-}
-
-
-void MemoryAllocator::AddChunkUsingAddress(uintptr_t chunk_start,
-                                           uintptr_t chunk_index_base) {
-  uintptr_t* fine_grained = AllocatedChunksFinder(
-      chunk_table_,
-      chunk_index_base,
-      kChunkSizeLog2 + (kChunkTableLevels - 1) * kChunkTableBitsPerLevel,
-      kCreateTablesAsNeeded);
-  int index = FineGrainedIndexForAddress(chunk_index_base);
-  if (fine_grained[index] != kUnusedChunkTableEntry) index++;
-  ASSERT(fine_grained[index] == kUnusedChunkTableEntry);
-  fine_grained[index] = chunk_start;
-}
-
-
-void MemoryAllocator::RemoveFromAllocatedChunks(Address addr, intptr_t size) {
-  ASSERT(size == kChunkSize);
-  uintptr_t int_address = reinterpret_cast<uintptr_t>(addr);
-  RemoveChunkFoundUsingAddress(int_address, int_address);
-  RemoveChunkFoundUsingAddress(int_address, int_address + size - 1);
-}
-
-
-void MemoryAllocator::RemoveChunkFoundUsingAddress(
-    uintptr_t chunk_start,
-    uintptr_t chunk_index_base) {
-  uintptr_t* fine_grained = AllocatedChunksFinder(
-      chunk_table_,
-      chunk_index_base,
-      kChunkSizeLog2 + (kChunkTableLevels - 1) * kChunkTableBitsPerLevel,
-      kDontCreateTables);
-  // Can't remove an entry that's not there.
-  ASSERT(fine_grained != kUnusedChunkTableEntry);
-  int index = FineGrainedIndexForAddress(chunk_index_base);
-  ASSERT(fine_grained[index] != kUnusedChunkTableEntry);
-  if (fine_grained[index] != chunk_start) {
-    index++;
-    ASSERT(fine_grained[index] == chunk_start);
-    fine_grained[index] = kUnusedChunkTableEntry;
-  } else {
-    // If only one of the entries is used it must be the first, since
-    // InAllocatedChunks relies on that. Move things around so that this is
-    // the case.
-    fine_grained[index] = fine_grained[index + 1];
-    fine_grained[index + 1] = kUnusedChunkTableEntry;
-  }
-}
-
-
-bool MemoryAllocator::InAllocatedChunks(Address addr) {
-  uintptr_t int_address = reinterpret_cast<uintptr_t>(addr);
-  uintptr_t* fine_grained = AllocatedChunksFinder(
-      chunk_table_,
-      int_address,
-      kChunkSizeLog2 + (kChunkTableLevels - 1) * kChunkTableBitsPerLevel,
-      kDontCreateTables);
-  if (fine_grained == NULL) return false;
-  int index = FineGrainedIndexForAddress(int_address);
-  if (fine_grained[index] == kUnusedChunkTableEntry) return false;
-  uintptr_t entry = fine_grained[index];
-  if (entry <= int_address && entry + kChunkSize > int_address) return true;
-  index++;
-  if (fine_grained[index] == kUnusedChunkTableEntry) return false;
-  entry = fine_grained[index];
-  if (entry <= int_address && entry + kChunkSize > int_address) return true;
-  return false;
-}
-
-
-uintptr_t* MemoryAllocator::AllocatedChunksFinder(
-    uintptr_t* table,
-    uintptr_t address,
-    int bit_position,
-    CreateTables create_as_needed) {
-  if (bit_position == kChunkSizeLog2) {
-    return table;
-  }
-  ASSERT(bit_position >= kChunkSizeLog2 + kChunkTableBitsPerLevel);
-  int index =
-      ((address >> bit_position) &
-       ((V8_INTPTR_C(1) << kChunkTableBitsPerLevel) - 1));
-  uintptr_t more_fine_grained_address =
-      address & ((V8_INTPTR_C(1) << bit_position) - 1);
-  ASSERT((table == chunk_table_ && index < kChunkTableTopLevelEntries) ||
-         (table != chunk_table_ && index < 1 << kChunkTableBitsPerLevel));
-  uintptr_t* more_fine_grained_table =
-      reinterpret_cast<uintptr_t*>(table[index]);
-  if (more_fine_grained_table == kUnusedChunkTableEntry) {
-    if (create_as_needed == kDontCreateTables) return NULL;
-    int words_needed = 1 << kChunkTableBitsPerLevel;
-    if (bit_position == kChunkTableBitsPerLevel + kChunkSizeLog2) {
-      words_needed =
-          (1 << kChunkTableBitsPerLevel) * kChunkTableFineGrainedWordsPerEntry;
-    }
-    more_fine_grained_table = new uintptr_t[words_needed];
-    for (int i = 0; i < words_needed; i++) {
-      more_fine_grained_table[i] = kUnusedChunkTableEntry;
-    }
-    table[index] = reinterpret_cast<uintptr_t>(more_fine_grained_table);
-  }
-  return AllocatedChunksFinder(
-      more_fine_grained_table,
-      more_fine_grained_address,
-      bit_position - kChunkTableBitsPerLevel,
-      create_as_needed);
-}
-
-
-uintptr_t MemoryAllocator::chunk_table_[kChunkTableTopLevelEntries];
-
-
 // -----------------------------------------------------------------------------
 // PagedSpace implementation
 
-PagedSpace::PagedSpace(intptr_t max_capacity,
+PagedSpace::PagedSpace(Heap* heap,
+                       intptr_t max_capacity,
                        AllocationSpace id,
                        Executability executable)
-    : Space(id, executable) {
+    : Space(heap, id, executable) {
   max_capacity_ = (RoundDown(max_capacity, Page::kPageSize) / Page::kPageSize)
                   * Page::kObjectAreaSize;
   accounting_stats_.Clear();
@@ -958,15 +819,17 @@ bool PagedSpace::Setup(Address start, size_t size) {
   // contain at least one page, ignore it and allocate instead.
   int pages_in_chunk = PagesInChunk(start, size);
   if (pages_in_chunk > 0) {
-    first_page_ = MemoryAllocator::CommitPages(RoundUp(start, Page::kPageSize),
-                                               Page::kPageSize * pages_in_chunk,
-                                               this, &num_pages);
+    first_page_ = Isolate::Current()->memory_allocator()->CommitPages(
+        RoundUp(start, Page::kPageSize),
+        Page::kPageSize * pages_in_chunk,
+        this, &num_pages);
   } else {
     int requested_pages =
         Min(MemoryAllocator::kPagesPerChunk,
            static_cast<int>(max_capacity_ / Page::kObjectAreaSize));
     first_page_ =
-        MemoryAllocator::AllocatePages(requested_pages, &num_pages, this);
+        Isolate::Current()->memory_allocator()->AllocatePages(
+            requested_pages, &num_pages, this);
     if (!first_page_->is_valid()) return false;
   }
 
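Where no heap or isolate field is in scope, as in PagedSpace::Setup above, the rewritten code falls back on Isolate::Current(), a lookup of the isolate the current thread last entered; the file list above adds src/platform-tls.h and friends (entries 336-338) to support exactly this. Below is a minimal sketch using standard C++11 thread_local in place of V8's platform-specific TLS machinery.

class Isolate {
 public:
  // Returns the isolate this thread most recently entered, or nullptr.
  static Isolate* Current() { return current_; }

  void Enter() { current_ = this; }
  void Exit()  { current_ = nullptr; }

 private:
  static thread_local Isolate* current_;  // one slot per thread
};

thread_local Isolate* Isolate::current_ = nullptr;

int main() {
  Isolate isolate;
  isolate.Enter();
  // Call sites like Isolate::Current()->memory_allocator()->... resolve
  // to whichever isolate this thread entered last.
  bool ok = (Isolate::Current() == &isolate);
  isolate.Exit();
  return ok ? 0 : 1;
}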
@@ -999,7 +862,7 @@ bool PagedSpace::HasBeenSetup() {
999
862
 
1000
863
 
1001
864
  void PagedSpace::TearDown() {
1002
- MemoryAllocator::FreeAllPages(this);
865
+ Isolate::Current()->memory_allocator()->FreeAllPages(this);
1003
866
  first_page_ = NULL;
1004
867
  accounting_stats_.Clear();
1005
868
  }
@@ -1010,8 +873,9 @@ void PagedSpace::TearDown() {
1010
873
  void PagedSpace::Protect() {
1011
874
  Page* page = first_page_;
1012
875
  while (page->is_valid()) {
1013
- MemoryAllocator::ProtectChunkFromPage(page);
1014
- page = MemoryAllocator::FindLastPageInSameChunk(page)->next_page();
876
+ Isolate::Current()->memory_allocator()->ProtectChunkFromPage(page);
877
+ page = Isolate::Current()->memory_allocator()->
878
+ FindLastPageInSameChunk(page)->next_page();
1015
879
  }
1016
880
  }
1017
881
 
@@ -1019,8 +883,9 @@ void PagedSpace::Protect() {
1019
883
  void PagedSpace::Unprotect() {
1020
884
  Page* page = first_page_;
1021
885
  while (page->is_valid()) {
1022
- MemoryAllocator::UnprotectChunkFromPage(page);
1023
- page = MemoryAllocator::FindLastPageInSameChunk(page)->next_page();
886
+ Isolate::Current()->memory_allocator()->UnprotectChunkFromPage(page);
887
+ page = Isolate::Current()->memory_allocator()->
888
+ FindLastPageInSameChunk(page)->next_page();
1024
889
  }
1025
890
  }
1026
891
 
@@ -1038,7 +903,7 @@ void PagedSpace::MarkAllPagesClean() {
1038
903
  MaybeObject* PagedSpace::FindObject(Address addr) {
1039
904
  // Note: this function can only be called before or after mark-compact GC
1040
905
  // because it accesses map pointers.
1041
- ASSERT(!MarkCompactCollector::in_use());
906
+ ASSERT(!heap()->mark_compact_collector()->in_use());
1042
907
 
1043
908
  if (!Contains(addr)) return Failure::Exception();
1044
909
 
@@ -1158,13 +1023,14 @@ bool PagedSpace::Expand(Page* last_page) {
1158
1023
  if (available_pages < MemoryAllocator::kPagesPerChunk) return false;
1159
1024
 
1160
1025
  int desired_pages = Min(available_pages, MemoryAllocator::kPagesPerChunk);
1161
- Page* p = MemoryAllocator::AllocatePages(desired_pages, &desired_pages, this);
1026
+ Page* p = heap()->isolate()->memory_allocator()->AllocatePages(
1027
+ desired_pages, &desired_pages, this);
1162
1028
  if (!p->is_valid()) return false;
1163
1029
 
1164
1030
  accounting_stats_.ExpandSpace(desired_pages * Page::kObjectAreaSize);
1165
1031
  ASSERT(Capacity() <= max_capacity_);
1166
1032
 
1167
- MemoryAllocator::SetNextPage(last_page, p);
1033
+ heap()->isolate()->memory_allocator()->SetNextPage(last_page, p);
1168
1034
 
1169
1035
  // Sequentially clear region marks of new pages and and cache the
1170
1036
  // new last page in the space.
@@ -1207,8 +1073,9 @@ void PagedSpace::Shrink() {
1207
1073
  }
1208
1074
 
1209
1075
  // Free pages after top_page.
1210
- Page* p = MemoryAllocator::FreePages(top_page->next_page());
1211
- MemoryAllocator::SetNextPage(top_page, p);
1076
+ Page* p = heap()->isolate()->memory_allocator()->
1077
+ FreePages(top_page->next_page());
1078
+ heap()->isolate()->memory_allocator()->SetNextPage(top_page, p);
1212
1079
 
1213
1080
  // Find out how many pages we failed to free and update last_page_.
1214
1081
  // Please note pages can only be freed in whole chunks.
@@ -1230,7 +1097,8 @@ bool PagedSpace::EnsureCapacity(int capacity) {
1230
1097
  Page* last_page = AllocationTopPage();
1231
1098
  Page* next_page = last_page->next_page();
1232
1099
  while (next_page->is_valid()) {
1233
- last_page = MemoryAllocator::FindLastPageInSameChunk(next_page);
1100
+ last_page = heap()->isolate()->memory_allocator()->
1101
+ FindLastPageInSameChunk(next_page);
1234
1102
  next_page = last_page->next_page();
1235
1103
  }
1236
1104
 
@@ -1239,7 +1107,8 @@ bool PagedSpace::EnsureCapacity(int capacity) {
1239
1107
  if (!Expand(last_page)) return false;
1240
1108
  ASSERT(last_page->next_page()->is_valid());
1241
1109
  last_page =
1242
- MemoryAllocator::FindLastPageInSameChunk(last_page->next_page());
1110
+ heap()->isolate()->memory_allocator()->FindLastPageInSameChunk(
1111
+ last_page->next_page());
1243
1112
  } while (Capacity() < capacity);
1244
1113
 
1245
1114
  return true;
@@ -1259,7 +1128,7 @@ void PagedSpace::Verify(ObjectVisitor* visitor) {
1259
1128
  // space.
1260
1129
  ASSERT(allocation_info_.VerifyPagedAllocation());
1261
1130
  Page* top_page = Page::FromAllocationTop(allocation_info_.top);
1262
- ASSERT(MemoryAllocator::IsPageInSpace(top_page, this));
1131
+ ASSERT(heap()->isolate()->memory_allocator()->IsPageInSpace(top_page, this));
1263
1132
 
1264
1133
  // Loop over all the pages.
1265
1134
  bool above_allocation_top = false;
@@ -1284,7 +1153,7 @@ void PagedSpace::Verify(ObjectVisitor* visitor) {
1284
1153
  // be in map space.
1285
1154
  Map* map = object->map();
1286
1155
  ASSERT(map->IsMap());
1287
- ASSERT(Heap::map_space()->Contains(map));
1156
+ ASSERT(heap()->map_space()->Contains(map));
1288
1157
 
1289
1158
  // Perform space-specific object verification.
1290
1159
  VerifyObject(object);
@@ -1320,8 +1189,8 @@ bool NewSpace::Setup(Address start, int size) {
1320
1189
  // start and size. The provided space is divided into two semi-spaces.
1321
1190
  // To support fast containment testing in the new space, the size of
1322
1191
  // this chunk must be a power of two and it must be aligned to its size.
1323
- int initial_semispace_capacity = Heap::InitialSemiSpaceSize();
1324
- int maximum_semispace_capacity = Heap::MaxSemiSpaceSize();
1192
+ int initial_semispace_capacity = heap()->InitialSemiSpaceSize();
1193
+ int maximum_semispace_capacity = heap()->MaxSemiSpaceSize();
1325
1194
 
1326
1195
  ASSERT(initial_semispace_capacity <= maximum_semispace_capacity);
1327
1196
  ASSERT(IsPowerOf2(maximum_semispace_capacity));
@@ -1337,7 +1206,7 @@ bool NewSpace::Setup(Address start, int size) {
1337
1206
  #undef SET_NAME
1338
1207
  #endif
1339
1208
 
1340
- ASSERT(size == 2 * Heap::ReservedSemiSpaceSize());
1209
+ ASSERT(size == 2 * heap()->ReservedSemiSpaceSize());
1341
1210
  ASSERT(IsAddressAligned(start, size, 0));
1342
1211
 
1343
1212
  if (!to_space_.Setup(start,
@@ -1392,16 +1261,16 @@ void NewSpace::TearDown() {
1392
1261
  #ifdef ENABLE_HEAP_PROTECTION
1393
1262
 
1394
1263
  void NewSpace::Protect() {
1395
- MemoryAllocator::Protect(ToSpaceLow(), Capacity());
1396
- MemoryAllocator::Protect(FromSpaceLow(), Capacity());
1264
+ heap()->isolate()->memory_allocator()->Protect(ToSpaceLow(), Capacity());
1265
+ heap()->isolate()->memory_allocator()->Protect(FromSpaceLow(), Capacity());
1397
1266
  }
1398
1267
 
1399
1268
 
1400
1269
  void NewSpace::Unprotect() {
1401
- MemoryAllocator::Unprotect(ToSpaceLow(), Capacity(),
1402
- to_space_.executable());
1403
- MemoryAllocator::Unprotect(FromSpaceLow(), Capacity(),
1404
- from_space_.executable());
1270
+ heap()->isolate()->memory_allocator()->Unprotect(ToSpaceLow(), Capacity(),
1271
+ to_space_.executable());
1272
+ heap()->isolate()->memory_allocator()->Unprotect(FromSpaceLow(), Capacity(),
1273
+ from_space_.executable());
1405
1274
  }
1406
1275
 
1407
1276
  #endif
@@ -1495,7 +1364,7 @@ void NewSpace::Verify() {
1495
1364
  // be in map space.
1496
1365
  Map* map = object->map();
1497
1366
  ASSERT(map->IsMap());
1498
- ASSERT(Heap::map_space()->Contains(map));
1367
+ ASSERT(heap()->map_space()->Contains(map));
1499
1368
 
1500
1369
  // The object should not be code or a map.
1501
1370
  ASSERT(!object->IsMap());
@@ -1520,7 +1389,8 @@ void NewSpace::Verify() {
1520
1389
 
1521
1390
  bool SemiSpace::Commit() {
1522
1391
  ASSERT(!is_committed());
1523
- if (!MemoryAllocator::CommitBlock(start_, capacity_, executable())) {
1392
+ if (!heap()->isolate()->memory_allocator()->CommitBlock(
1393
+ start_, capacity_, executable())) {
1524
1394
  return false;
1525
1395
  }
1526
1396
  committed_ = true;
@@ -1530,7 +1400,8 @@ bool SemiSpace::Commit() {
1530
1400
 
1531
1401
  bool SemiSpace::Uncommit() {
1532
1402
  ASSERT(is_committed());
1533
- if (!MemoryAllocator::UncommitBlock(start_, capacity_)) {
1403
+ if (!heap()->isolate()->memory_allocator()->UncommitBlock(
1404
+ start_, capacity_)) {
1534
1405
  return false;
1535
1406
  }
1536
1407
  committed_ = false;
@@ -1576,7 +1447,8 @@ bool SemiSpace::Grow() {
1576
1447
  int maximum_extra = maximum_capacity_ - capacity_;
1577
1448
  int extra = Min(RoundUp(capacity_, static_cast<int>(OS::AllocateAlignment())),
1578
1449
  maximum_extra);
1579
- if (!MemoryAllocator::CommitBlock(high(), extra, executable())) {
1450
+ if (!heap()->isolate()->memory_allocator()->CommitBlock(
1451
+ high(), extra, executable())) {
1580
1452
  return false;
1581
1453
  }
1582
1454
  capacity_ += extra;
@@ -1589,7 +1461,8 @@ bool SemiSpace::GrowTo(int new_capacity) {
1589
1461
  ASSERT(new_capacity > capacity_);
1590
1462
  size_t delta = new_capacity - capacity_;
1591
1463
  ASSERT(IsAligned(delta, OS::AllocateAlignment()));
1592
- if (!MemoryAllocator::CommitBlock(high(), delta, executable())) {
1464
+ if (!heap()->isolate()->memory_allocator()->CommitBlock(
1465
+ high(), delta, executable())) {
1593
1466
  return false;
1594
1467
  }
1595
1468
  capacity_ = new_capacity;
@@ -1602,7 +1475,8 @@ bool SemiSpace::ShrinkTo(int new_capacity) {
1602
1475
  ASSERT(new_capacity < capacity_);
1603
1476
  size_t delta = capacity_ - new_capacity;
1604
1477
  ASSERT(IsAligned(delta, OS::AllocateAlignment()));
1605
- if (!MemoryAllocator::UncommitBlock(high() - delta, delta)) {
1478
+ if (!heap()->isolate()->memory_allocator()->UncommitBlock(
1479
+ high() - delta, delta)) {
1606
1480
  return false;
1607
1481
  }
1608
1482
  capacity_ = new_capacity;
@@ -1650,36 +1524,32 @@ void SemiSpaceIterator::Initialize(NewSpace* space, Address start,


  #ifdef DEBUG
- // A static array of histogram info for each type.
- static HistogramInfo heap_histograms[LAST_TYPE+1];
- static JSObject::SpillInformation js_spill_information;
-
  // heap_histograms is shared, always clear it before using it.
  static void ClearHistograms() {
+ Isolate* isolate = Isolate::Current();
  // We reset the name each time, though it hasn't changed.
- #define DEF_TYPE_NAME(name) heap_histograms[name].set_name(#name);
+ #define DEF_TYPE_NAME(name) isolate->heap_histograms()[name].set_name(#name);
  INSTANCE_TYPE_LIST(DEF_TYPE_NAME)
  #undef DEF_TYPE_NAME

- #define CLEAR_HISTOGRAM(name) heap_histograms[name].clear();
+ #define CLEAR_HISTOGRAM(name) isolate->heap_histograms()[name].clear();
  INSTANCE_TYPE_LIST(CLEAR_HISTOGRAM)
  #undef CLEAR_HISTOGRAM

- js_spill_information.Clear();
+ isolate->js_spill_information()->Clear();
  }


- static int code_kind_statistics[Code::NUMBER_OF_KINDS];
-
-
  static void ClearCodeKindStatistics() {
+ Isolate* isolate = Isolate::Current();
  for (int i = 0; i < Code::NUMBER_OF_KINDS; i++) {
- code_kind_statistics[i] = 0;
+ isolate->code_kind_statistics()[i] = 0;
  }
  }


  static void ReportCodeKindStatistics() {
+ Isolate* isolate = Isolate::Current();
  const char* table[Code::NUMBER_OF_KINDS] = { NULL };

  #define CASE(name) \
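The deleted file statics (heap_histograms, js_spill_information, code_kind_statistics) do not disappear; they reappear as per-isolate storage behind accessors, and each DEBUG helper now starts by fetching Isolate::Current(). A hedged sketch of that storage move follows; the array bound and the Current() implementation are placeholders patterned on the diff, not the real definitions.

// Sketch: debug counters move from file scope into the Isolate so that
// concurrent isolates keep separate statistics.
#include <array>

class Isolate {
 public:
  static const int kNumberOfCodeKinds = 16;  // stand-in for Code::NUMBER_OF_KINDS

  static Isolate* Current() {
    static Isolate process_wide;  // stand-in; V8 resolves this per thread
    return &process_wide;
  }
  int* code_kind_statistics() { return code_kind_statistics_.data(); }
 private:
  std::array<int, kNumberOfCodeKinds> code_kind_statistics_{};
};

static void ClearCodeKindStatistics() {
  Isolate* isolate = Isolate::Current();
  for (int i = 0; i < Isolate::kNumberOfCodeKinds; i++) {
    isolate->code_kind_statistics()[i] = 0;  // per-isolate, not a global array
  }
}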
@@ -1694,8 +1564,10 @@ static void ReportCodeKindStatistics() {
  CASE(BUILTIN);
  CASE(LOAD_IC);
  CASE(KEYED_LOAD_IC);
+ CASE(KEYED_EXTERNAL_ARRAY_LOAD_IC);
  CASE(STORE_IC);
  CASE(KEYED_STORE_IC);
+ CASE(KEYED_EXTERNAL_ARRAY_STORE_IC);
  CASE(CALL_IC);
  CASE(KEYED_CALL_IC);
  CASE(BINARY_OP_IC);
@@ -1708,8 +1580,9 @@ static void ReportCodeKindStatistics() {

  PrintF("\n Code kind histograms: \n");
  for (int i = 0; i < Code::NUMBER_OF_KINDS; i++) {
- if (code_kind_statistics[i] > 0) {
- PrintF(" %-20s: %10d bytes\n", table[i], code_kind_statistics[i]);
+ if (isolate->code_kind_statistics()[i] > 0) {
+ PrintF(" %-20s: %10d bytes\n", table[i],
+ isolate->code_kind_statistics()[i]);
  }
  }
  PrintF("\n");
@@ -1717,14 +1590,16 @@


  static int CollectHistogramInfo(HeapObject* obj) {
+ Isolate* isolate = Isolate::Current();
  InstanceType type = obj->map()->instance_type();
  ASSERT(0 <= type && type <= LAST_TYPE);
- ASSERT(heap_histograms[type].name() != NULL);
- heap_histograms[type].increment_number(1);
- heap_histograms[type].increment_bytes(obj->Size());
+ ASSERT(isolate->heap_histograms()[type].name() != NULL);
+ isolate->heap_histograms()[type].increment_number(1);
+ isolate->heap_histograms()[type].increment_bytes(obj->Size());

  if (FLAG_collect_heap_spill_statistics && obj->IsJSObject()) {
- JSObject::cast(obj)->IncrementSpillStatistics(&js_spill_information);
+ JSObject::cast(obj)->IncrementSpillStatistics(
+ isolate->js_spill_information());
  }

  return obj->Size();
@@ -1732,13 +1607,14 @@ static int CollectHistogramInfo(HeapObject* obj) {


  static void ReportHistogram(bool print_spill) {
+ Isolate* isolate = Isolate::Current();
  PrintF("\n Object Histogram:\n");
  for (int i = 0; i <= LAST_TYPE; i++) {
- if (heap_histograms[i].number() > 0) {
+ if (isolate->heap_histograms()[i].number() > 0) {
  PrintF(" %-34s%10d (%10d bytes)\n",
- heap_histograms[i].name(),
- heap_histograms[i].number(),
- heap_histograms[i].bytes());
+ isolate->heap_histograms()[i].name(),
+ isolate->heap_histograms()[i].number(),
+ isolate->heap_histograms()[i].bytes());
  }
  }
  PrintF("\n");
@@ -1747,8 +1623,8 @@ static void ReportHistogram(bool print_spill) {
  int string_number = 0;
  int string_bytes = 0;
  #define INCREMENT(type, size, name, camel_name) \
- string_number += heap_histograms[type].number(); \
- string_bytes += heap_histograms[type].bytes();
+ string_number += isolate->heap_histograms()[type].number(); \
+ string_bytes += isolate->heap_histograms()[type].bytes();
  STRING_TYPE_LIST(INCREMENT)
  #undef INCREMENT
  if (string_number > 0) {
@@ -1757,7 +1633,7 @@ static void ReportHistogram(bool print_spill) {
  }

  if (FLAG_collect_heap_spill_statistics && print_spill) {
- js_spill_information.Print();
+ isolate->js_spill_information()->Print();
  }
  }
  #endif // DEBUG
@@ -1786,8 +1662,9 @@ void NewSpace::CollectStatistics() {


  #ifdef ENABLE_LOGGING_AND_PROFILING
- static void DoReportStatistics(HistogramInfo* info, const char* description) {
- LOG(HeapSampleBeginEvent("NewSpace", description));
+ static void DoReportStatistics(Isolate* isolate,
+ HistogramInfo* info, const char* description) {
+ LOG(isolate, HeapSampleBeginEvent("NewSpace", description));
  // Lump all the string types together.
  int string_number = 0;
  int string_bytes = 0;
@@ -1797,17 +1674,19 @@ static void DoReportStatistics(HistogramInfo* info, const char* description) {
  STRING_TYPE_LIST(INCREMENT)
  #undef INCREMENT
  if (string_number > 0) {
- LOG(HeapSampleItemEvent("STRING_TYPE", string_number, string_bytes));
+ LOG(isolate,
+ HeapSampleItemEvent("STRING_TYPE", string_number, string_bytes));
  }

  // Then do the other types.
  for (int i = FIRST_NONSTRING_TYPE; i <= LAST_TYPE; ++i) {
  if (info[i].number() > 0) {
- LOG(HeapSampleItemEvent(info[i].name(), info[i].number(),
+ LOG(isolate,
+ HeapSampleItemEvent(info[i].name(), info[i].number(),
  info[i].bytes()));
  }
  }
- LOG(HeapSampleEndEvent("NewSpace", description));
+ LOG(isolate, HeapSampleEndEvent("NewSpace", description));
  }
  #endif // ENABLE_LOGGING_AND_PROFILING
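Note that LOG itself changed arity in this refactor: every event now names the isolate whose logger should receive it. The macro below is a plausible reconstruction of that two-argument shape; the Logger interface shown is a stand-in for illustration, not the real signature.

#include <cstdio>

class Logger {
 public:
  bool is_logging() const { return true; }  // the real check consults runtime flags
  void HeapSampleBeginEvent(const char* space, const char* description) {
    std::printf("heap-sample-begin,\"%s\",\"%s\"\n", space, description);
  }
};

class Isolate {
 public:
  Logger* logger() { return &logger_; }
 private:
  Logger logger_;
};

// The first argument routes the event to one isolate's logger; the second is
// the member call to perform when logging is enabled.
#define LOG(isolate, Call)                    \
  do {                                        \
    Logger* logger = (isolate)->logger();     \
    if (logger->is_logging()) logger->Call;   \
  } while (false)

int main() {
  Isolate isolate;
  LOG(&isolate, HeapSampleBeginEvent("NewSpace", "allocated"));
  return 0;
}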
1813
1692
 
@@ -1834,8 +1713,9 @@ void NewSpace::ReportStatistics() {

  #ifdef ENABLE_LOGGING_AND_PROFILING
  if (FLAG_log_gc) {
- DoReportStatistics(allocated_histogram_, "allocated");
- DoReportStatistics(promoted_histogram_, "promoted");
+ Isolate* isolate = ISOLATE;
+ DoReportStatistics(isolate, allocated_histogram_, "allocated");
+ DoReportStatistics(isolate, promoted_histogram_, "promoted");
  }
  #endif // ENABLE_LOGGING_AND_PROFILING
  }
@@ -1861,7 +1741,7 @@ void NewSpace::RecordPromotion(HeapObject* obj) {
  // -----------------------------------------------------------------------------
  // Free lists for old object spaces implementation

- void FreeListNode::set_size(int size_in_bytes) {
+ void FreeListNode::set_size(Heap* heap, int size_in_bytes) {
  ASSERT(size_in_bytes > 0);
  ASSERT(IsAligned(size_in_bytes, kPointerSize));

@@ -1873,14 +1753,14 @@ void FreeListNode::set_size(int size_in_bytes) {
  // field and a next pointer, we give it a filler map that gives it the
  // correct size.
  if (size_in_bytes > ByteArray::kHeaderSize) {
- set_map(Heap::raw_unchecked_byte_array_map());
+ set_map(heap->raw_unchecked_byte_array_map());
  // Can't use ByteArray::cast because it fails during deserialization.
  ByteArray* this_as_byte_array = reinterpret_cast<ByteArray*>(this);
  this_as_byte_array->set_length(ByteArray::LengthFor(size_in_bytes));
  } else if (size_in_bytes == kPointerSize) {
- set_map(Heap::raw_unchecked_one_pointer_filler_map());
+ set_map(heap->raw_unchecked_one_pointer_filler_map());
  } else if (size_in_bytes == 2 * kPointerSize) {
- set_map(Heap::raw_unchecked_two_pointer_filler_map());
+ set_map(heap->raw_unchecked_two_pointer_filler_map());
  } else {
  UNREACHABLE();
  }
@@ -1889,9 +1769,9 @@ void FreeListNode::set_size(int size_in_bytes) {
  }


- Address FreeListNode::next() {
+ Address FreeListNode::next(Heap* heap) {
  ASSERT(IsFreeListNode(this));
- if (map() == Heap::raw_unchecked_byte_array_map()) {
+ if (map() == heap->raw_unchecked_byte_array_map()) {
  ASSERT(Size() >= kNextOffset + kPointerSize);
  return Memory::Address_at(address() + kNextOffset);
  } else {
@@ -1900,9 +1780,9 @@ Address FreeListNode::next() {
  }


- void FreeListNode::set_next(Address next) {
+ void FreeListNode::set_next(Heap* heap, Address next) {
  ASSERT(IsFreeListNode(this));
- if (map() == Heap::raw_unchecked_byte_array_map()) {
+ if (map() == heap->raw_unchecked_byte_array_map()) {
  ASSERT(Size() >= kNextOffset + kPointerSize);
  Memory::Address_at(address() + kNextOffset) = next;
  } else {
@@ -1911,7 +1791,9 @@ void FreeListNode::set_next(Address next) {
  }


- OldSpaceFreeList::OldSpaceFreeList(AllocationSpace owner) : owner_(owner) {
+ OldSpaceFreeList::OldSpaceFreeList(Heap* heap, AllocationSpace owner)
+ : heap_(heap),
+ owner_(owner) {
  Reset();
  }
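The free-list changes share one design constraint worth spelling out: a FreeListNode is an overlay on dead heap memory, so it cannot grow a heap_ field of its own. The owning free list therefore caches the Heap* once in its constructor and threads it into every node operation (set_size, next, set_next). A toy model of that shape, with names mirroring the diff but everything else simplified and assumed:

#include <cstdint>

struct Heap {
  // Stand-in for raw_unchecked_byte_array_map() and the filler maps: the
  // maps a free node borrows to masquerade as a valid object live in the Heap.
  std::uintptr_t byte_array_map = 0xBADA;
};

class FreeListNode {
 public:
  void set_size(Heap* heap, int size_in_bytes) {
    map_ = heap->byte_array_map;  // the real code picks a map by block size
    size_ = size_in_bytes;
  }
  void set_next(Heap* /*heap*/, FreeListNode* next) { next_ = next; }
  FreeListNode* next(Heap* /*heap*/) const { return next_; }
 private:
  std::uintptr_t map_ = 0;
  int size_ = 0;
  FreeListNode* next_ = nullptr;
};

class OldSpaceFreeList {
 public:
  explicit OldSpaceFreeList(Heap* heap) : heap_(heap) {}
  void Free(FreeListNode* node, int size_in_bytes) {
    node->set_size(heap_, size_in_bytes);  // heap_ cached at construction
    node->set_next(heap_, head_);
    head_ = node;
  }
 private:
  Heap* const heap_;
  FreeListNode* head_ = nullptr;
};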
@@ -1943,10 +1825,10 @@ void OldSpaceFreeList::RebuildSizeList() {

  int OldSpaceFreeList::Free(Address start, int size_in_bytes) {
  #ifdef DEBUG
- MemoryAllocator::ZapBlock(start, size_in_bytes);
+ Isolate::Current()->memory_allocator()->ZapBlock(start, size_in_bytes);
  #endif
  FreeListNode* node = FreeListNode::FromAddress(start);
- node->set_size(size_in_bytes);
+ node->set_size(heap_, size_in_bytes);

  // We don't use the freelists in compacting mode. This makes it more like a
  // GC that only has mark-sweep-compact and doesn't have a mark-sweep
@@ -1964,7 +1846,7 @@ int OldSpaceFreeList::Free(Address start, int size_in_bytes) {

  // Insert other blocks at the head of an exact free list.
  int index = size_in_bytes >> kPointerSizeLog2;
- node->set_next(free_[index].head_node_);
+ node->set_next(heap_, free_[index].head_node_);
  free_[index].head_node_ = node->address();
  available_ += size_in_bytes;
  needs_rebuild_ = true;
@@ -1983,7 +1865,8 @@ MaybeObject* OldSpaceFreeList::Allocate(int size_in_bytes, int* wasted_bytes) {
  if (free_[index].head_node_ != NULL) {
  FreeListNode* node = FreeListNode::FromAddress(free_[index].head_node_);
  // If this was the last block of its size, remove the size.
- if ((free_[index].head_node_ = node->next()) == NULL) RemoveSize(index);
+ if ((free_[index].head_node_ = node->next(heap_)) == NULL)
+ RemoveSize(index);
  available_ -= size_in_bytes;
  *wasted_bytes = 0;
  ASSERT(!FLAG_always_compact); // We only use the freelists with mark-sweep.
@@ -2012,33 +1895,33 @@ MaybeObject* OldSpaceFreeList::Allocate(int size_in_bytes, int* wasted_bytes) {
  finger_ = prev;
  free_[prev].next_size_ = rem;
  // If this was the last block of size cur, remove the size.
- if ((free_[cur].head_node_ = cur_node->next()) == NULL) {
+ if ((free_[cur].head_node_ = cur_node->next(heap_)) == NULL) {
  free_[rem].next_size_ = free_[cur].next_size_;
  } else {
  free_[rem].next_size_ = cur;
  }
  // Add the remainder block.
- rem_node->set_size(rem_bytes);
- rem_node->set_next(free_[rem].head_node_);
+ rem_node->set_size(heap_, rem_bytes);
+ rem_node->set_next(heap_, free_[rem].head_node_);
  free_[rem].head_node_ = rem_node->address();
  } else {
  // If this was the last block of size cur, remove the size.
- if ((free_[cur].head_node_ = cur_node->next()) == NULL) {
+ if ((free_[cur].head_node_ = cur_node->next(heap_)) == NULL) {
  finger_ = prev;
  free_[prev].next_size_ = free_[cur].next_size_;
  }
  if (rem_bytes < kMinBlockSize) {
  // Too-small remainder is wasted.
- rem_node->set_size(rem_bytes);
+ rem_node->set_size(heap_, rem_bytes);
  available_ -= size_in_bytes + rem_bytes;
  *wasted_bytes = rem_bytes;
  return cur_node;
  }
  // Add the remainder block and, if needed, insert its size.
- rem_node->set_size(rem_bytes);
- rem_node->set_next(free_[rem].head_node_);
+ rem_node->set_size(heap_, rem_bytes);
+ rem_node->set_next(heap_, free_[rem].head_node_);
  free_[rem].head_node_ = rem_node->address();
- if (rem_node->next() == NULL) InsertSize(rem);
+ if (rem_node->next(heap_) == NULL) InsertSize(rem);
  }
  available_ -= size_in_bytes;
  *wasted_bytes = 0;
@@ -2051,7 +1934,7 @@ void OldSpaceFreeList::MarkNodes() {
  Address cur_addr = free_[i].head_node_;
  while (cur_addr != NULL) {
  FreeListNode* cur_node = FreeListNode::FromAddress(cur_addr);
- cur_addr = cur_node->next();
+ cur_addr = cur_node->next(heap_);
  cur_node->SetMark();
  }
  }
@@ -2065,7 +1948,7 @@ bool OldSpaceFreeList::Contains(FreeListNode* node) {
  while (cur_addr != NULL) {
  FreeListNode* cur_node = FreeListNode::FromAddress(cur_addr);
  if (cur_node == node) return true;
- cur_addr = cur_node->next();
+ cur_addr = cur_node->next(heap_);
  }
  }
  return false;
@@ -2073,8 +1956,10 @@ bool OldSpaceFreeList::Contains(FreeListNode* node) {
  #endif


- FixedSizeFreeList::FixedSizeFreeList(AllocationSpace owner, int object_size)
- : owner_(owner), object_size_(object_size) {
+ FixedSizeFreeList::FixedSizeFreeList(Heap* heap,
+ AllocationSpace owner,
+ int object_size)
+ : heap_(heap), owner_(owner), object_size_(object_size) {
  Reset();
  }

@@ -2087,17 +1972,17 @@ void FixedSizeFreeList::Reset() {

  void FixedSizeFreeList::Free(Address start) {
  #ifdef DEBUG
- MemoryAllocator::ZapBlock(start, object_size_);
+ Isolate::Current()->memory_allocator()->ZapBlock(start, object_size_);
  #endif
  // We only use the freelists with mark-sweep.
- ASSERT(!MarkCompactCollector::IsCompacting());
+ ASSERT(!HEAP->mark_compact_collector()->IsCompacting());
  FreeListNode* node = FreeListNode::FromAddress(start);
- node->set_size(object_size_);
- node->set_next(NULL);
+ node->set_size(heap_, object_size_);
+ node->set_next(heap_, NULL);
  if (head_ == NULL) {
  tail_ = head_ = node->address();
  } else {
- FreeListNode::FromAddress(tail_)->set_next(node->address());
+ FreeListNode::FromAddress(tail_)->set_next(heap_, node->address());
  tail_ = node->address();
  }
  available_ += object_size_;
@@ -2111,7 +1996,7 @@ MaybeObject* FixedSizeFreeList::Allocate() {

  ASSERT(!FLAG_always_compact); // We only use the freelists with mark-sweep.
  FreeListNode* node = FreeListNode::FromAddress(head_);
- head_ = node->next();
+ head_ = node->next(heap_);
  available_ -= object_size_;
  return node;
  }
@@ -2121,7 +2006,7 @@ void FixedSizeFreeList::MarkNodes() {
  Address cur_addr = head_;
  while (cur_addr != NULL && cur_addr != tail_) {
  FreeListNode* cur_node = FreeListNode::FromAddress(cur_addr);
- cur_addr = cur_node->next();
+ cur_addr = cur_node->next(heap_);
  cur_node->SetMark();
  }
  }
@@ -2217,13 +2102,14 @@ void PagedSpace::FreePages(Page* prev, Page* last) {
  first_page_ = last->next_page();
  } else {
  first = prev->next_page();
- MemoryAllocator::SetNextPage(prev, last->next_page());
+ heap()->isolate()->memory_allocator()->SetNextPage(
+ prev, last->next_page());
  }

  // Attach it after the last page.
- MemoryAllocator::SetNextPage(last_page_, first);
+ heap()->isolate()->memory_allocator()->SetNextPage(last_page_, first);
  last_page_ = last;
- MemoryAllocator::SetNextPage(last, NULL);
+ heap()->isolate()->memory_allocator()->SetNextPage(last, NULL);

  // Clean them up.
  do {
@@ -2262,10 +2148,8 @@ void PagedSpace::RelinkPageListInChunkOrder(bool deallocate_blocks) {
  if (page_list_is_chunk_ordered_) return;

  Page* new_last_in_use = Page::FromAddress(NULL);
- MemoryAllocator::RelinkPageListInChunkOrder(this,
- &first_page_,
- &last_page_,
- &new_last_in_use);
+ heap()->isolate()->memory_allocator()->RelinkPageListInChunkOrder(
+ this, &first_page_, &last_page_, &new_last_in_use);
  ASSERT(new_last_in_use->is_valid());

  if (new_last_in_use != last_in_use) {
@@ -2282,7 +2166,7 @@ void PagedSpace::RelinkPageListInChunkOrder(bool deallocate_blocks) {
  accounting_stats_.AllocateBytes(size_in_bytes);
  DeallocateBlock(start, size_in_bytes, add_to_freelist);
  } else {
- Heap::CreateFillerObjectAt(start, size_in_bytes);
+ heap()->CreateFillerObjectAt(start, size_in_bytes);
  }
  }

@@ -2309,7 +2193,7 @@ void PagedSpace::RelinkPageListInChunkOrder(bool deallocate_blocks) {
  accounting_stats_.AllocateBytes(size_in_bytes);
  DeallocateBlock(start, size_in_bytes, add_to_freelist);
  } else {
- Heap::CreateFillerObjectAt(start, size_in_bytes);
+ heap()->CreateFillerObjectAt(start, size_in_bytes);
  }
  }
  }
@@ -2338,7 +2222,7 @@ bool PagedSpace::ReserveSpace(int bytes) {
  int bytes_left_to_reserve = bytes;
  while (bytes_left_to_reserve > 0) {
  if (!reserved_page->next_page()->is_valid()) {
- if (Heap::OldGenerationAllocationLimitReached()) return false;
+ if (heap()->OldGenerationAllocationLimitReached()) return false;
  Expand(reserved_page);
  }
  bytes_left_to_reserve -= Page::kPageSize;
@@ -2356,7 +2240,7 @@ bool PagedSpace::ReserveSpace(int bytes) {
  // You have to call this last, since the implementation from PagedSpace
  // doesn't know that memory was 'promised' to large object space.
  bool LargeObjectSpace::ReserveSpace(int bytes) {
- return Heap::OldGenerationSpaceAvailable() >= bytes;
+ return heap()->OldGenerationSpaceAvailable() >= bytes;
  }


@@ -2375,7 +2259,7 @@ HeapObject* OldSpace::SlowAllocateRaw(int size_in_bytes) {

  // There is no next page in this space. Try free list allocation unless that
  // is currently forbidden.
- if (!Heap::linear_allocation()) {
+ if (!heap()->linear_allocation()) {
  int wasted_bytes;
  Object* result;
  MaybeObject* maybe = free_list_.Allocate(size_in_bytes, &wasted_bytes);
@@ -2402,7 +2286,8 @@ HeapObject* OldSpace::SlowAllocateRaw(int size_in_bytes) {
  // Free list allocation failed and there is no next page. Fail if we have
  // hit the old generation size limit that should cause a garbage
  // collection.
- if (!Heap::always_allocate() && Heap::OldGenerationAllocationLimitReached()) {
+ if (!heap()->always_allocate() &&
+ heap()->OldGenerationAllocationLimitReached()) {
  return NULL;
  }

@@ -2465,28 +2350,14 @@ void OldSpace::DeallocateBlock(Address start,


  #ifdef DEBUG
- struct CommentStatistic {
- const char* comment;
- int size;
- int count;
- void Clear() {
- comment = NULL;
- size = 0;
- count = 0;
- }
- };
-
-
- // must be small, since an iteration is used for lookup
- const int kMaxComments = 64;
- static CommentStatistic comments_statistics[kMaxComments+1];
-
-
  void PagedSpace::ReportCodeStatistics() {
+ Isolate* isolate = Isolate::Current();
+ CommentStatistic* comments_statistics =
+ isolate->paged_space_comments_statistics();
  ReportCodeKindStatistics();
  PrintF("Code comment statistics (\" [ comment-txt : size/ "
  "count (average)\"):\n");
- for (int i = 0; i <= kMaxComments; i++) {
+ for (int i = 0; i <= CommentStatistic::kMaxComments; i++) {
  const CommentStatistic& cs = comments_statistics[i];
  if (cs.size > 0) {
  PrintF(" %-30s: %10d/%6d (%d)\n", cs.comment, cs.size, cs.count,
@@ -2498,23 +2369,30 @@ void PagedSpace::ReportCodeStatistics() {


  void PagedSpace::ResetCodeStatistics() {
+ Isolate* isolate = Isolate::Current();
+ CommentStatistic* comments_statistics =
+ isolate->paged_space_comments_statistics();
  ClearCodeKindStatistics();
- for (int i = 0; i < kMaxComments; i++) comments_statistics[i].Clear();
- comments_statistics[kMaxComments].comment = "Unknown";
- comments_statistics[kMaxComments].size = 0;
- comments_statistics[kMaxComments].count = 0;
+ for (int i = 0; i < CommentStatistic::kMaxComments; i++) {
+ comments_statistics[i].Clear();
+ }
+ comments_statistics[CommentStatistic::kMaxComments].comment = "Unknown";
+ comments_statistics[CommentStatistic::kMaxComments].size = 0;
+ comments_statistics[CommentStatistic::kMaxComments].count = 0;
  }


- // Adds comment to 'comment_statistics' table. Performance OK sa long as
+ // Adds comment to 'comment_statistics' table. Performance OK as long as
  // 'kMaxComments' is small
- static void EnterComment(const char* comment, int delta) {
+ static void EnterComment(Isolate* isolate, const char* comment, int delta) {
+ CommentStatistic* comments_statistics =
+ isolate->paged_space_comments_statistics();
  // Do not count empty comments
  if (delta <= 0) return;
- CommentStatistic* cs = &comments_statistics[kMaxComments];
+ CommentStatistic* cs = &comments_statistics[CommentStatistic::kMaxComments];
  // Search for a free or matching entry in 'comments_statistics': 'cs'
  // points to result.
- for (int i = 0; i < kMaxComments; i++) {
+ for (int i = 0; i < CommentStatistic::kMaxComments; i++) {
  if (comments_statistics[i].comment == NULL) {
  cs = &comments_statistics[i];
  cs->comment = comment;
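Here the whole CommentStatistic table moves under the Isolate, and the old file-scope kMaxComments becomes a member constant. The struct fields, the bound of 64, and the extra "Unknown" overflow slot are all taken from the hunk above; the surrounding Isolate accessor is an assumed sketch of where the diff parks the storage.

struct CommentStatistic {
  const char* comment;
  int size;
  int count;
  void Clear() {
    comment = nullptr;
    size = 0;
    count = 0;
  }
  // Must stay small: EnterComment finds entries by linear scan.
  static const int kMaxComments = 64;
};

class Isolate {
 public:
  CommentStatistic* paged_space_comments_statistics() {
    return comments_statistics_;
  }
 private:
  // One slot past kMaxComments aggregates overflow entries as "Unknown".
  CommentStatistic comments_statistics_[CommentStatistic::kMaxComments + 1];
};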
@@ -2532,7 +2410,7 @@ static void EnterComment(const char* comment, int delta) {

  // Call for each nested comment start (start marked with '[ xxx', end marked
  // with ']'. RelocIterator 'it' must point to a comment reloc info.
- static void CollectCommentStatistics(RelocIterator* it) {
+ static void CollectCommentStatistics(Isolate* isolate, RelocIterator* it) {
  ASSERT(!it->done());
  ASSERT(it->rinfo()->rmode() == RelocInfo::COMMENT);
  const char* tmp = reinterpret_cast<const char*>(it->rinfo()->data());
@@ -2557,13 +2435,13 @@ static void CollectCommentStatistics(RelocIterator* it) {
  flat_delta += static_cast<int>(it->rinfo()->pc() - prev_pc);
  if (txt[0] == ']') break; // End of nested comment
  // A new comment
- CollectCommentStatistics(it);
+ CollectCommentStatistics(isolate, it);
  // Skip code that was covered with previous comment
  prev_pc = it->rinfo()->pc();
  }
  it->next();
  }
- EnterComment(comment_txt, flat_delta);
+ EnterComment(isolate, comment_txt, flat_delta);
  }


@@ -2571,18 +2449,19 @@ static void CollectCommentStatistics(RelocIterator* it) {
  // - by code kind
  // - by code comment
  void PagedSpace::CollectCodeStatistics() {
+ Isolate* isolate = heap()->isolate();
  HeapObjectIterator obj_it(this);
  for (HeapObject* obj = obj_it.next(); obj != NULL; obj = obj_it.next()) {
  if (obj->IsCode()) {
  Code* code = Code::cast(obj);
- code_kind_statistics[code->kind()] += code->Size();
+ isolate->code_kind_statistics()[code->kind()] += code->Size();
  RelocIterator it(code);
  int delta = 0;
  const byte* prev_pc = code->instruction_start();
  while (!it.done()) {
  if (it.rinfo()->rmode() == RelocInfo::COMMENT) {
  delta += static_cast<int>(it.rinfo()->pc() - prev_pc);
- CollectCommentStatistics(&it);
+ CollectCommentStatistics(isolate, &it);
  prev_pc = it.rinfo()->pc();
  }
  it.next();
@@ -2591,7 +2470,7 @@ void PagedSpace::CollectCodeStatistics() {
  ASSERT(code->instruction_start() <= prev_pc &&
  prev_pc <= code->instruction_end());
  delta += static_cast<int>(code->instruction_end() - prev_pc);
- EnterComment("NoComment", delta);
+ EnterComment(isolate, "NoComment", delta);
  }
  }
  }
@@ -2685,7 +2564,7 @@ HeapObject* FixedSpace::SlowAllocateRaw(int size_in_bytes) {
  // There is no next page in this space. Try free list allocation unless
  // that is currently forbidden. The fixed space free list implicitly assumes
  // that all free blocks are of the fixed size.
- if (!Heap::linear_allocation()) {
+ if (!heap()->linear_allocation()) {
  Object* result;
  MaybeObject* maybe = free_list_.Allocate();
  if (maybe->ToObject(&result)) {
@@ -2709,7 +2588,8 @@ HeapObject* FixedSpace::SlowAllocateRaw(int size_in_bytes) {
  // Free list allocation failed and there is no next page. Fail if we have
  // hit the old generation size limit that should cause a garbage
  // collection.
- if (!Heap::always_allocate() && Heap::OldGenerationAllocationLimitReached()) {
+ if (!heap()->always_allocate() &&
+ heap()->OldGenerationAllocationLimitReached()) {
  return NULL;
  }

@@ -2811,7 +2691,7 @@ void MapSpace::VerifyObject(HeapObject* object) {
  void CellSpace::VerifyObject(HeapObject* object) {
  // The object should be a global object property cell or a free-list node.
  ASSERT(object->IsJSGlobalPropertyCell() ||
- object->map() == Heap::two_pointer_filler_map());
+ object->map() == heap()->two_pointer_filler_map());
  }
  #endif

@@ -2848,28 +2728,33 @@ LargeObjectChunk* LargeObjectChunk::New(int size_in_bytes,
  Executability executable) {
  size_t requested = ChunkSizeFor(size_in_bytes);
  size_t size;
- void* mem = MemoryAllocator::AllocateRawMemory(requested, &size, executable);
+ Isolate* isolate = Isolate::Current();
+ void* mem = isolate->memory_allocator()->AllocateRawMemory(
+ requested, &size, executable);
  if (mem == NULL) return NULL;

  // The start of the chunk may be overlayed with a page so we have to
  // make sure that the page flags fit in the size field.
  ASSERT((size & Page::kPageFlagMask) == 0);

- LOG(NewEvent("LargeObjectChunk", mem, size));
+ LOG(isolate, NewEvent("LargeObjectChunk", mem, size));
  if (size < requested) {
- MemoryAllocator::FreeRawMemory(mem, size, executable);
- LOG(DeleteEvent("LargeObjectChunk", mem));
+ isolate->memory_allocator()->FreeRawMemory(
+ mem, size, executable);
+ LOG(isolate, DeleteEvent("LargeObjectChunk", mem));
  return NULL;
  }

  ObjectSpace space = (executable == EXECUTABLE)
  ? kObjectSpaceCodeSpace
  : kObjectSpaceLoSpace;
- MemoryAllocator::PerformAllocationCallback(
+ isolate->memory_allocator()->PerformAllocationCallback(
  space, kAllocationActionAllocate, size);

  LargeObjectChunk* chunk = reinterpret_cast<LargeObjectChunk*>(mem);
  chunk->size_ = size;
+ Page* page = Page::FromAddress(RoundUp(chunk->address(), Page::kPageSize));
+ page->heap_ = isolate->heap();
  return chunk;
  }
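The two lines added at the end of LargeObjectChunk::New() are what make all the heap() calls in this file work for large objects: they stamp the chunk's page with the owning heap, so later code holding only a raw address can walk Page::FromAddress(addr)->heap_ back to the right Heap and Isolate. A rough sketch of that back-pointer walk; the page size and alignment masking are illustrative assumptions.

#include <cstdint>

struct Heap;  // opaque in this sketch

struct Page {
  static const std::uintptr_t kPageSize = 1 << 13;  // assumed 8 KB pages
  Heap* heap_;  // stamped once when the chunk is created

  // Pages are size-aligned, so masking the low bits of any address inside
  // the page recovers the Page header, and through it the owning heap.
  static Page* FromAddress(std::uintptr_t addr) {
    return reinterpret_cast<Page*>(addr & ~(kPageSize - 1));
  }
};

inline Heap* HeapFromObjectAddress(std::uintptr_t addr) {
  return Page::FromAddress(addr)->heap_;
}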
@@ -2885,8 +2770,8 @@ int LargeObjectChunk::ChunkSizeFor(int size_in_bytes) {
  // -----------------------------------------------------------------------------
  // LargeObjectSpace

- LargeObjectSpace::LargeObjectSpace(AllocationSpace id)
- : Space(id, NOT_EXECUTABLE), // Managed on a per-allocation basis
+ LargeObjectSpace::LargeObjectSpace(Heap* heap, AllocationSpace id)
+ : Space(heap, id, NOT_EXECUTABLE), // Managed on a per-allocation basis
  first_chunk_(NULL),
  size_(0),
  page_count_(0),
@@ -2906,15 +2791,17 @@ void LargeObjectSpace::TearDown() {
  while (first_chunk_ != NULL) {
  LargeObjectChunk* chunk = first_chunk_;
  first_chunk_ = first_chunk_->next();
- LOG(DeleteEvent("LargeObjectChunk", chunk->address()));
+ LOG(heap()->isolate(), DeleteEvent("LargeObjectChunk", chunk->address()));
  Page* page = Page::FromAddress(RoundUp(chunk->address(), Page::kPageSize));
  Executability executable =
  page->IsPageExecutable() ? EXECUTABLE : NOT_EXECUTABLE;
  ObjectSpace space = kObjectSpaceLoSpace;
  if (executable == EXECUTABLE) space = kObjectSpaceCodeSpace;
  size_t size = chunk->size();
- MemoryAllocator::FreeRawMemory(chunk->address(), size, executable);
- MemoryAllocator::PerformAllocationCallback(
+ heap()->isolate()->memory_allocator()->FreeRawMemory(chunk->address(),
+ size,
+ executable);
+ heap()->isolate()->memory_allocator()->PerformAllocationCallback(
  space, kAllocationActionFree, size);
  }

@@ -2929,7 +2816,8 @@ void LargeObjectSpace::TearDown() {
  void LargeObjectSpace::Protect() {
  LargeObjectChunk* chunk = first_chunk_;
  while (chunk != NULL) {
- MemoryAllocator::Protect(chunk->address(), chunk->size());
+ heap()->isolate()->memory_allocator()->Protect(chunk->address(),
+ chunk->size());
  chunk = chunk->next();
  }
  }
@@ -2939,8 +2827,8 @@ void LargeObjectSpace::Unprotect() {
  LargeObjectChunk* chunk = first_chunk_;
  while (chunk != NULL) {
  bool is_code = chunk->GetObject()->IsCode();
- MemoryAllocator::Unprotect(chunk->address(), chunk->size(),
- is_code ? EXECUTABLE : NOT_EXECUTABLE);
+ heap()->isolate()->memory_allocator()->Unprotect(chunk->address(),
+ chunk->size(), is_code ? EXECUTABLE : NOT_EXECUTABLE);
  chunk = chunk->next();
  }
  }
@@ -2955,7 +2843,8 @@ MaybeObject* LargeObjectSpace::AllocateRawInternal(int requested_size,

  // Check if we want to force a GC before growing the old space further.
  // If so, fail the allocation.
- if (!Heap::always_allocate() && Heap::OldGenerationAllocationLimitReached()) {
+ if (!heap()->always_allocate() &&
+ heap()->OldGenerationAllocationLimitReached()) {
  return Failure::RetryAfterGC(identity());
  }

@@ -3060,22 +2949,22 @@ void LargeObjectSpace::IterateDirtyRegions(ObjectSlotCallback copy_object) {
  // Iterate regions of the first normal page covering object.
  uint32_t first_region_number = page->GetRegionNumberForAddress(start);
  newmarks |=
- Heap::IterateDirtyRegions(marks >> first_region_number,
- start,
- end,
- &Heap::IteratePointersInDirtyRegion,
- copy_object) << first_region_number;
+ heap()->IterateDirtyRegions(marks >> first_region_number,
+ start,
+ end,
+ &Heap::IteratePointersInDirtyRegion,
+ copy_object) << first_region_number;

  start = end;
  end = start + Page::kPageSize;
  while (end <= object_end) {
  // Iterate next 32 regions.
  newmarks |=
- Heap::IterateDirtyRegions(marks,
- start,
- end,
- &Heap::IteratePointersInDirtyRegion,
- copy_object);
+ heap()->IterateDirtyRegions(marks,
+ start,
+ end,
+ &Heap::IteratePointersInDirtyRegion,
+ copy_object);
  start = end;
  end = start + Page::kPageSize;
  }
@@ -3084,11 +2973,11 @@ void LargeObjectSpace::IterateDirtyRegions(ObjectSlotCallback copy_object) {
  // Iterate the last piece of an object which is less than
  // Page::kPageSize.
  newmarks |=
- Heap::IterateDirtyRegions(marks,
- start,
- object_end,
- &Heap::IteratePointersInDirtyRegion,
- copy_object);
+ heap()->IterateDirtyRegions(marks,
+ start,
+ object_end,
+ &Heap::IteratePointersInDirtyRegion,
+ copy_object);
  }

  page->SetRegionMarks(newmarks);
@@ -3105,7 +2994,7 @@ void LargeObjectSpace::FreeUnmarkedObjects() {
  HeapObject* object = current->GetObject();
  if (object->IsMarked()) {
  object->ClearMark();
- MarkCompactCollector::tracer()->decrement_marked_count();
+ heap()->mark_compact_collector()->tracer()->decrement_marked_count();
  previous = current;
  current = current->next();
  } else {
@@ -3125,7 +3014,8 @@ void LargeObjectSpace::FreeUnmarkedObjects() {
  }

  // Free the chunk.
- MarkCompactCollector::ReportDeleteIfNeeded(object);
+ heap()->mark_compact_collector()->ReportDeleteIfNeeded(
+ object, heap()->isolate());
  LiveObjectList::ProcessNonLive(object);

  size_ -= static_cast<int>(chunk_size);
@@ -3133,10 +3023,12 @@ void LargeObjectSpace::FreeUnmarkedObjects() {
  page_count_--;
  ObjectSpace space = kObjectSpaceLoSpace;
  if (executable == EXECUTABLE) space = kObjectSpaceCodeSpace;
- MemoryAllocator::FreeRawMemory(chunk_address, chunk_size, executable);
- MemoryAllocator::PerformAllocationCallback(space, kAllocationActionFree,
- size_);
- LOG(DeleteEvent("LargeObjectChunk", chunk_address));
+ heap()->isolate()->memory_allocator()->FreeRawMemory(chunk_address,
+ chunk_size,
+ executable);
+ heap()->isolate()->memory_allocator()->PerformAllocationCallback(
+ space, kAllocationActionFree, size_);
+ LOG(heap()->isolate(), DeleteEvent("LargeObjectChunk", chunk_address));
  }
  }
  }
@@ -3144,7 +3036,7 @@ void LargeObjectSpace::FreeUnmarkedObjects() {

  bool LargeObjectSpace::Contains(HeapObject* object) {
  Address address = object->address();
- if (Heap::new_space()->Contains(address)) {
+ if (heap()->new_space()->Contains(address)) {
  return false;
  }
  Page* page = Page::FromAddress(address);
@@ -3173,7 +3065,7 @@ void LargeObjectSpace::Verify() {
  // in map space.
  Map* map = object->map();
  ASSERT(map->IsMap());
- ASSERT(Heap::map_space()->Contains(map));
+ ASSERT(heap()->map_space()->Contains(map));

  // We have only code, sequential strings, external strings
  // (sequential strings that have been morphed into external
@@ -3200,9 +3092,9 @@ void LargeObjectSpace::Verify() {
  Object* element = array->get(j);
  if (element->IsHeapObject()) {
  HeapObject* element_object = HeapObject::cast(element);
- ASSERT(Heap::Contains(element_object));
+ ASSERT(heap()->Contains(element_object));
  ASSERT(element_object->map()->IsMap());
- if (Heap::InNewSpace(element_object)) {
+ if (heap()->InNewSpace(element_object)) {
  Address array_addr = object->address();
  Address element_addr = array_addr + FixedArray::kHeaderSize +
  j * kPointerSize;
@@ -3241,11 +3133,12 @@ void LargeObjectSpace::ReportStatistics() {


  void LargeObjectSpace::CollectCodeStatistics() {
+ Isolate* isolate = heap()->isolate();
  LargeObjectIterator obj_it(this);
  for (HeapObject* obj = obj_it.next(); obj != NULL; obj = obj_it.next()) {
  if (obj->IsCode()) {
  Code* code = Code::cast(obj);
- code_kind_statistics[code->kind()] += code->Size();
+ isolate->code_kind_statistics()[code->kind()] += code->Size();
  }
  }
  }