mustang 0.0.1 → 0.1.0

This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (560)
  1. data/.rspec +1 -0
  2. data/Isolate +9 -0
  3. data/README.md +6 -12
  4. data/Rakefile +30 -4
  5. data/TODO.md +9 -0
  6. data/ext/v8/extconf.rb +56 -0
  7. data/ext/v8/v8.cpp +37 -0
  8. data/ext/v8/v8_array.cpp +161 -0
  9. data/ext/v8/v8_array.h +17 -0
  10. data/ext/v8/v8_base.cpp +147 -0
  11. data/ext/v8/v8_base.h +23 -0
  12. data/ext/v8/v8_cast.cpp +151 -0
  13. data/ext/v8/v8_cast.h +64 -0
  14. data/ext/v8/v8_context.cpp +174 -0
  15. data/ext/v8/v8_context.h +12 -0
  16. data/ext/v8/v8_date.cpp +61 -0
  17. data/ext/v8/v8_date.h +16 -0
  18. data/ext/v8/v8_errors.cpp +147 -0
  19. data/ext/v8/v8_errors.h +19 -0
  20. data/ext/v8/v8_external.cpp +66 -0
  21. data/ext/v8/v8_external.h +16 -0
  22. data/ext/v8/v8_function.cpp +182 -0
  23. data/ext/v8/v8_function.h +14 -0
  24. data/ext/v8/v8_integer.cpp +70 -0
  25. data/ext/v8/v8_integer.h +16 -0
  26. data/ext/v8/v8_macros.h +30 -0
  27. data/ext/v8/v8_main.cpp +53 -0
  28. data/ext/v8/v8_main.h +13 -0
  29. data/ext/v8/v8_number.cpp +62 -0
  30. data/ext/v8/v8_number.h +16 -0
  31. data/ext/v8/v8_object.cpp +172 -0
  32. data/ext/v8/v8_object.h +17 -0
  33. data/ext/v8/v8_ref.cpp +72 -0
  34. data/ext/v8/v8_ref.h +43 -0
  35. data/ext/v8/v8_regexp.cpp +148 -0
  36. data/ext/v8/v8_regexp.h +16 -0
  37. data/ext/v8/v8_string.cpp +78 -0
  38. data/ext/v8/v8_string.h +16 -0
  39. data/ext/v8/v8_value.cpp +370 -0
  40. data/ext/v8/v8_value.h +19 -0
  41. data/gemspec.yml +2 -1
  42. data/lib/core_ext/class.rb +14 -0
  43. data/lib/core_ext/object.rb +12 -0
  44. data/lib/core_ext/symbol.rb +23 -0
  45. data/lib/mustang.rb +44 -0
  46. data/lib/mustang/context.rb +69 -0
  47. data/lib/mustang/errors.rb +36 -0
  48. data/lib/support/delegated.rb +25 -0
  49. data/lib/v8/array.rb +21 -0
  50. data/lib/v8/context.rb +13 -0
  51. data/lib/v8/date.rb +20 -0
  52. data/lib/v8/error.rb +15 -0
  53. data/lib/v8/external.rb +16 -0
  54. data/lib/v8/function.rb +11 -0
  55. data/lib/v8/integer.rb +16 -0
  56. data/lib/v8/number.rb +16 -0
  57. data/lib/v8/object.rb +66 -0
  58. data/lib/v8/regexp.rb +23 -0
  59. data/lib/v8/string.rb +27 -0
  60. data/mustang.gemspec +3 -0
  61. data/spec/core_ext/class_spec.rb +19 -0
  62. data/spec/core_ext/object_spec.rb +19 -0
  63. data/spec/core_ext/symbol_spec.rb +27 -0
  64. data/spec/fixtures/test1.js +2 -0
  65. data/spec/fixtures/test2.js +2 -0
  66. data/spec/spec_helper.rb +20 -0
  67. data/spec/v8/array_spec.rb +88 -0
  68. data/spec/v8/cast_spec.rb +151 -0
  69. data/spec/v8/context_spec.rb +78 -0
  70. data/spec/v8/data_spec.rb +39 -0
  71. data/spec/v8/date_spec.rb +45 -0
  72. data/spec/v8/empty_spec.rb +27 -0
  73. data/spec/v8/errors_spec.rb +142 -0
  74. data/spec/v8/external_spec.rb +44 -0
  75. data/spec/v8/function_spec.rb +170 -0
  76. data/spec/v8/integer_spec.rb +41 -0
  77. data/spec/v8/main_spec.rb +18 -0
  78. data/spec/v8/null_spec.rb +27 -0
  79. data/spec/v8/number_spec.rb +40 -0
  80. data/spec/v8/object_spec.rb +79 -0
  81. data/spec/v8/primitive_spec.rb +9 -0
  82. data/spec/v8/regexp_spec.rb +65 -0
  83. data/spec/v8/string_spec.rb +48 -0
  84. data/spec/v8/undefined_spec.rb +27 -0
  85. data/spec/v8/value_spec.rb +215 -0
  86. data/vendor/v8/.gitignore +2 -0
  87. data/vendor/v8/AUTHORS +3 -1
  88. data/vendor/v8/ChangeLog +117 -0
  89. data/vendor/v8/SConstruct +334 -53
  90. data/vendor/v8/include/v8-debug.h +21 -11
  91. data/vendor/v8/include/v8-preparser.h +1 -1
  92. data/vendor/v8/include/v8-profiler.h +122 -43
  93. data/vendor/v8/include/v8-testing.h +5 -0
  94. data/vendor/v8/include/v8.h +171 -17
  95. data/vendor/v8/preparser/SConscript +38 -0
  96. data/vendor/v8/preparser/preparser-process.cc +77 -114
  97. data/vendor/v8/samples/shell.cc +232 -46
  98. data/vendor/v8/src/SConscript +29 -5
  99. data/vendor/v8/src/accessors.cc +70 -211
  100. data/vendor/v8/{test/cctest/test-mips.cc → src/allocation-inl.h} +15 -18
  101. data/vendor/v8/src/allocation.cc +0 -82
  102. data/vendor/v8/src/allocation.h +9 -42
  103. data/vendor/v8/src/api.cc +1645 -1156
  104. data/vendor/v8/src/api.h +76 -12
  105. data/vendor/v8/src/apiutils.h +0 -7
  106. data/vendor/v8/src/arguments.h +15 -4
  107. data/vendor/v8/src/arm/assembler-arm-inl.h +10 -9
  108. data/vendor/v8/src/arm/assembler-arm.cc +62 -23
  109. data/vendor/v8/src/arm/assembler-arm.h +76 -11
  110. data/vendor/v8/src/arm/builtins-arm.cc +39 -33
  111. data/vendor/v8/src/arm/code-stubs-arm.cc +1182 -402
  112. data/vendor/v8/src/arm/code-stubs-arm.h +20 -54
  113. data/vendor/v8/src/arm/codegen-arm.cc +159 -106
  114. data/vendor/v8/src/arm/codegen-arm.h +6 -6
  115. data/vendor/v8/src/arm/constants-arm.h +16 -1
  116. data/vendor/v8/src/arm/cpu-arm.cc +7 -5
  117. data/vendor/v8/src/arm/debug-arm.cc +6 -4
  118. data/vendor/v8/src/arm/deoptimizer-arm.cc +51 -14
  119. data/vendor/v8/src/arm/disasm-arm.cc +47 -15
  120. data/vendor/v8/src/arm/frames-arm.h +1 -1
  121. data/vendor/v8/src/arm/full-codegen-arm.cc +724 -408
  122. data/vendor/v8/src/arm/ic-arm.cc +90 -85
  123. data/vendor/v8/src/arm/lithium-arm.cc +140 -69
  124. data/vendor/v8/src/arm/lithium-arm.h +161 -46
  125. data/vendor/v8/src/arm/lithium-codegen-arm.cc +567 -297
  126. data/vendor/v8/src/arm/lithium-codegen-arm.h +21 -9
  127. data/vendor/v8/src/arm/lithium-gap-resolver-arm.cc +2 -0
  128. data/vendor/v8/src/arm/macro-assembler-arm.cc +457 -96
  129. data/vendor/v8/src/arm/macro-assembler-arm.h +115 -18
  130. data/vendor/v8/src/arm/regexp-macro-assembler-arm.cc +20 -13
  131. data/vendor/v8/src/arm/regexp-macro-assembler-arm.h +1 -0
  132. data/vendor/v8/src/arm/simulator-arm.cc +184 -101
  133. data/vendor/v8/src/arm/simulator-arm.h +26 -21
  134. data/vendor/v8/src/arm/stub-cache-arm.cc +450 -467
  135. data/vendor/v8/src/arm/virtual-frame-arm.cc +14 -12
  136. data/vendor/v8/src/arm/virtual-frame-arm.h +11 -8
  137. data/vendor/v8/src/array.js +35 -18
  138. data/vendor/v8/src/assembler.cc +186 -92
  139. data/vendor/v8/src/assembler.h +106 -69
  140. data/vendor/v8/src/ast-inl.h +5 -0
  141. data/vendor/v8/src/ast.cc +46 -35
  142. data/vendor/v8/src/ast.h +107 -50
  143. data/vendor/v8/src/atomicops.h +2 -0
  144. data/vendor/v8/src/atomicops_internals_mips_gcc.h +169 -0
  145. data/vendor/v8/src/bootstrapper.cc +649 -399
  146. data/vendor/v8/src/bootstrapper.h +94 -27
  147. data/vendor/v8/src/builtins.cc +359 -227
  148. data/vendor/v8/src/builtins.h +157 -123
  149. data/vendor/v8/src/checks.cc +2 -2
  150. data/vendor/v8/src/checks.h +4 -0
  151. data/vendor/v8/src/code-stubs.cc +27 -17
  152. data/vendor/v8/src/code-stubs.h +38 -17
  153. data/vendor/v8/src/codegen-inl.h +5 -1
  154. data/vendor/v8/src/codegen.cc +27 -17
  155. data/vendor/v8/src/codegen.h +9 -9
  156. data/vendor/v8/src/compilation-cache.cc +92 -206
  157. data/vendor/v8/src/compilation-cache.h +205 -30
  158. data/vendor/v8/src/compiler.cc +107 -120
  159. data/vendor/v8/src/compiler.h +17 -2
  160. data/vendor/v8/src/contexts.cc +22 -15
  161. data/vendor/v8/src/contexts.h +14 -8
  162. data/vendor/v8/src/conversions.cc +86 -30
  163. data/vendor/v8/src/counters.cc +19 -4
  164. data/vendor/v8/src/counters.h +28 -16
  165. data/vendor/v8/src/cpu-profiler-inl.h +4 -3
  166. data/vendor/v8/src/cpu-profiler.cc +123 -72
  167. data/vendor/v8/src/cpu-profiler.h +33 -19
  168. data/vendor/v8/src/cpu.h +2 -0
  169. data/vendor/v8/src/d8-debug.cc +3 -3
  170. data/vendor/v8/src/d8-debug.h +7 -6
  171. data/vendor/v8/src/d8-posix.cc +2 -0
  172. data/vendor/v8/src/d8.cc +22 -12
  173. data/vendor/v8/src/d8.gyp +3 -0
  174. data/vendor/v8/src/d8.js +618 -0
  175. data/vendor/v8/src/data-flow.h +3 -3
  176. data/vendor/v8/src/dateparser.h +4 -2
  177. data/vendor/v8/src/debug-agent.cc +10 -9
  178. data/vendor/v8/src/debug-agent.h +9 -11
  179. data/vendor/v8/src/debug-debugger.js +121 -0
  180. data/vendor/v8/src/debug.cc +331 -227
  181. data/vendor/v8/src/debug.h +248 -219
  182. data/vendor/v8/src/deoptimizer.cc +173 -62
  183. data/vendor/v8/src/deoptimizer.h +119 -19
  184. data/vendor/v8/src/disasm.h +3 -0
  185. data/vendor/v8/src/disassembler.cc +10 -9
  186. data/vendor/v8/src/execution.cc +185 -129
  187. data/vendor/v8/src/execution.h +47 -78
  188. data/vendor/v8/src/extensions/experimental/break-iterator.cc +250 -0
  189. data/vendor/v8/src/extensions/experimental/break-iterator.h +89 -0
  190. data/vendor/v8/src/extensions/experimental/experimental.gyp +2 -0
  191. data/vendor/v8/src/extensions/experimental/i18n-extension.cc +22 -2
  192. data/vendor/v8/src/extensions/externalize-string-extension.cc +2 -2
  193. data/vendor/v8/src/extensions/gc-extension.cc +1 -1
  194. data/vendor/v8/src/factory.cc +261 -154
  195. data/vendor/v8/src/factory.h +162 -158
  196. data/vendor/v8/src/flag-definitions.h +17 -11
  197. data/vendor/v8/src/frame-element.cc +0 -5
  198. data/vendor/v8/src/frame-element.h +9 -13
  199. data/vendor/v8/src/frames-inl.h +7 -0
  200. data/vendor/v8/src/frames.cc +56 -46
  201. data/vendor/v8/src/frames.h +36 -25
  202. data/vendor/v8/src/full-codegen.cc +15 -24
  203. data/vendor/v8/src/full-codegen.h +13 -41
  204. data/vendor/v8/src/func-name-inferrer.cc +7 -6
  205. data/vendor/v8/src/func-name-inferrer.h +1 -1
  206. data/vendor/v8/src/gdb-jit.cc +1 -0
  207. data/vendor/v8/src/global-handles.cc +118 -56
  208. data/vendor/v8/src/global-handles.h +98 -40
  209. data/vendor/v8/src/globals.h +2 -2
  210. data/vendor/v8/src/handles-inl.h +106 -9
  211. data/vendor/v8/src/handles.cc +220 -157
  212. data/vendor/v8/src/handles.h +38 -59
  213. data/vendor/v8/src/hashmap.h +3 -3
  214. data/vendor/v8/src/heap-inl.h +141 -25
  215. data/vendor/v8/src/heap-profiler.cc +117 -63
  216. data/vendor/v8/src/heap-profiler.h +38 -21
  217. data/vendor/v8/src/heap.cc +805 -564
  218. data/vendor/v8/src/heap.h +640 -594
  219. data/vendor/v8/src/hydrogen-instructions.cc +216 -73
  220. data/vendor/v8/src/hydrogen-instructions.h +259 -124
  221. data/vendor/v8/src/hydrogen.cc +996 -1171
  222. data/vendor/v8/src/hydrogen.h +163 -144
  223. data/vendor/v8/src/ia32/assembler-ia32-inl.h +12 -11
  224. data/vendor/v8/src/ia32/assembler-ia32.cc +85 -39
  225. data/vendor/v8/src/ia32/assembler-ia32.h +82 -16
  226. data/vendor/v8/src/ia32/builtins-ia32.cc +64 -58
  227. data/vendor/v8/src/ia32/code-stubs-ia32.cc +248 -324
  228. data/vendor/v8/src/ia32/code-stubs-ia32.h +3 -44
  229. data/vendor/v8/src/ia32/codegen-ia32.cc +217 -165
  230. data/vendor/v8/src/ia32/codegen-ia32.h +3 -0
  231. data/vendor/v8/src/ia32/cpu-ia32.cc +6 -5
  232. data/vendor/v8/src/ia32/debug-ia32.cc +8 -5
  233. data/vendor/v8/src/ia32/deoptimizer-ia32.cc +124 -14
  234. data/vendor/v8/src/ia32/disasm-ia32.cc +85 -62
  235. data/vendor/v8/src/ia32/frames-ia32.h +1 -1
  236. data/vendor/v8/src/ia32/full-codegen-ia32.cc +348 -435
  237. data/vendor/v8/src/ia32/ic-ia32.cc +91 -91
  238. data/vendor/v8/src/ia32/lithium-codegen-ia32.cc +500 -255
  239. data/vendor/v8/src/ia32/lithium-codegen-ia32.h +13 -4
  240. data/vendor/v8/src/ia32/lithium-gap-resolver-ia32.cc +6 -0
  241. data/vendor/v8/src/ia32/lithium-ia32.cc +122 -45
  242. data/vendor/v8/src/ia32/lithium-ia32.h +128 -41
  243. data/vendor/v8/src/ia32/macro-assembler-ia32.cc +109 -84
  244. data/vendor/v8/src/ia32/macro-assembler-ia32.h +18 -9
  245. data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.cc +26 -15
  246. data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.h +1 -0
  247. data/vendor/v8/src/ia32/register-allocator-ia32.cc +30 -30
  248. data/vendor/v8/src/ia32/simulator-ia32.h +4 -4
  249. data/vendor/v8/src/ia32/stub-cache-ia32.cc +383 -400
  250. data/vendor/v8/src/ia32/virtual-frame-ia32.cc +36 -13
  251. data/vendor/v8/src/ia32/virtual-frame-ia32.h +11 -5
  252. data/vendor/v8/src/ic-inl.h +12 -2
  253. data/vendor/v8/src/ic.cc +304 -221
  254. data/vendor/v8/src/ic.h +115 -58
  255. data/vendor/v8/src/interpreter-irregexp.cc +25 -21
  256. data/vendor/v8/src/interpreter-irregexp.h +2 -1
  257. data/vendor/v8/src/isolate.cc +883 -0
  258. data/vendor/v8/src/isolate.h +1304 -0
  259. data/vendor/v8/src/json.js +10 -10
  260. data/vendor/v8/src/jsregexp.cc +111 -80
  261. data/vendor/v8/src/jsregexp.h +6 -7
  262. data/vendor/v8/src/jump-target-heavy.cc +5 -8
  263. data/vendor/v8/src/jump-target-heavy.h +0 -6
  264. data/vendor/v8/src/jump-target-inl.h +1 -1
  265. data/vendor/v8/src/jump-target-light.cc +3 -3
  266. data/vendor/v8/src/lithium-allocator-inl.h +2 -0
  267. data/vendor/v8/src/lithium-allocator.cc +42 -30
  268. data/vendor/v8/src/lithium-allocator.h +8 -22
  269. data/vendor/v8/src/lithium.cc +1 -0
  270. data/vendor/v8/src/liveedit.cc +141 -99
  271. data/vendor/v8/src/liveedit.h +7 -2
  272. data/vendor/v8/src/liveobjectlist-inl.h +90 -0
  273. data/vendor/v8/src/liveobjectlist.cc +2537 -1
  274. data/vendor/v8/src/liveobjectlist.h +245 -35
  275. data/vendor/v8/src/log-utils.cc +122 -35
  276. data/vendor/v8/src/log-utils.h +33 -36
  277. data/vendor/v8/src/log.cc +299 -241
  278. data/vendor/v8/src/log.h +177 -110
  279. data/vendor/v8/src/mark-compact.cc +612 -470
  280. data/vendor/v8/src/mark-compact.h +153 -80
  281. data/vendor/v8/src/messages.cc +16 -14
  282. data/vendor/v8/src/messages.js +30 -7
  283. data/vendor/v8/src/mips/assembler-mips-inl.h +155 -35
  284. data/vendor/v8/src/mips/assembler-mips.cc +1093 -219
  285. data/vendor/v8/src/mips/assembler-mips.h +552 -153
  286. data/vendor/v8/src/mips/builtins-mips.cc +43 -100
  287. data/vendor/v8/src/mips/code-stubs-mips.cc +752 -0
  288. data/vendor/v8/src/mips/code-stubs-mips.h +511 -0
  289. data/vendor/v8/src/mips/codegen-mips-inl.h +8 -14
  290. data/vendor/v8/src/mips/codegen-mips.cc +672 -896
  291. data/vendor/v8/src/mips/codegen-mips.h +271 -69
  292. data/vendor/v8/src/mips/constants-mips.cc +44 -20
  293. data/vendor/v8/src/mips/constants-mips.h +238 -40
  294. data/vendor/v8/src/mips/cpu-mips.cc +20 -3
  295. data/vendor/v8/src/mips/debug-mips.cc +35 -7
  296. data/vendor/v8/src/mips/deoptimizer-mips.cc +91 -0
  297. data/vendor/v8/src/mips/disasm-mips.cc +329 -93
  298. data/vendor/v8/src/mips/frames-mips.cc +2 -50
  299. data/vendor/v8/src/mips/frames-mips.h +24 -9
  300. data/vendor/v8/src/mips/full-codegen-mips.cc +473 -23
  301. data/vendor/v8/src/mips/ic-mips.cc +81 -45
  302. data/vendor/v8/src/mips/jump-target-mips.cc +11 -106
  303. data/vendor/v8/src/mips/lithium-codegen-mips.h +65 -0
  304. data/vendor/v8/src/mips/lithium-mips.h +304 -0
  305. data/vendor/v8/src/mips/macro-assembler-mips.cc +2391 -390
  306. data/vendor/v8/src/mips/macro-assembler-mips.h +718 -121
  307. data/vendor/v8/src/mips/regexp-macro-assembler-mips.cc +478 -0
  308. data/vendor/v8/src/mips/regexp-macro-assembler-mips.h +250 -0
  309. data/vendor/v8/src/mips/register-allocator-mips-inl.h +0 -3
  310. data/vendor/v8/src/mips/register-allocator-mips.h +3 -2
  311. data/vendor/v8/src/mips/simulator-mips.cc +1009 -221
  312. data/vendor/v8/src/mips/simulator-mips.h +119 -36
  313. data/vendor/v8/src/mips/stub-cache-mips.cc +331 -148
  314. data/vendor/v8/src/mips/{fast-codegen-mips.cc → virtual-frame-mips-inl.h} +11 -30
  315. data/vendor/v8/src/mips/virtual-frame-mips.cc +137 -149
  316. data/vendor/v8/src/mips/virtual-frame-mips.h +294 -312
  317. data/vendor/v8/src/mirror-debugger.js +9 -8
  318. data/vendor/v8/src/mksnapshot.cc +2 -2
  319. data/vendor/v8/src/objects-debug.cc +16 -16
  320. data/vendor/v8/src/objects-inl.h +421 -195
  321. data/vendor/v8/src/objects-printer.cc +7 -7
  322. data/vendor/v8/src/objects-visiting.cc +1 -1
  323. data/vendor/v8/src/objects-visiting.h +33 -12
  324. data/vendor/v8/src/objects.cc +935 -658
  325. data/vendor/v8/src/objects.h +234 -139
  326. data/vendor/v8/src/parser.cc +484 -439
  327. data/vendor/v8/src/parser.h +35 -14
  328. data/vendor/v8/src/platform-cygwin.cc +173 -107
  329. data/vendor/v8/src/platform-freebsd.cc +224 -72
  330. data/vendor/v8/src/platform-linux.cc +234 -95
  331. data/vendor/v8/src/platform-macos.cc +215 -82
  332. data/vendor/v8/src/platform-nullos.cc +9 -3
  333. data/vendor/v8/src/platform-openbsd.cc +22 -7
  334. data/vendor/v8/src/platform-posix.cc +30 -5
  335. data/vendor/v8/src/platform-solaris.cc +120 -38
  336. data/vendor/v8/src/platform-tls-mac.h +62 -0
  337. data/vendor/v8/src/platform-tls-win32.h +62 -0
  338. data/vendor/v8/src/platform-tls.h +50 -0
  339. data/vendor/v8/src/platform-win32.cc +195 -97
  340. data/vendor/v8/src/platform.h +72 -15
  341. data/vendor/v8/src/preparse-data.cc +2 -0
  342. data/vendor/v8/src/preparser-api.cc +8 -2
  343. data/vendor/v8/src/preparser.cc +1 -1
  344. data/vendor/v8/src/prettyprinter.cc +43 -52
  345. data/vendor/v8/src/prettyprinter.h +1 -1
  346. data/vendor/v8/src/profile-generator-inl.h +0 -28
  347. data/vendor/v8/src/profile-generator.cc +942 -685
  348. data/vendor/v8/src/profile-generator.h +210 -176
  349. data/vendor/v8/src/property.cc +6 -0
  350. data/vendor/v8/src/property.h +14 -3
  351. data/vendor/v8/src/regexp-macro-assembler-irregexp.cc +1 -1
  352. data/vendor/v8/src/regexp-macro-assembler.cc +28 -19
  353. data/vendor/v8/src/regexp-macro-assembler.h +11 -6
  354. data/vendor/v8/src/regexp-stack.cc +18 -10
  355. data/vendor/v8/src/regexp-stack.h +45 -21
  356. data/vendor/v8/src/regexp.js +3 -3
  357. data/vendor/v8/src/register-allocator-inl.h +3 -3
  358. data/vendor/v8/src/register-allocator.cc +1 -7
  359. data/vendor/v8/src/register-allocator.h +5 -15
  360. data/vendor/v8/src/rewriter.cc +2 -1
  361. data/vendor/v8/src/runtime-profiler.cc +158 -128
  362. data/vendor/v8/src/runtime-profiler.h +131 -15
  363. data/vendor/v8/src/runtime.cc +2409 -1692
  364. data/vendor/v8/src/runtime.h +93 -17
  365. data/vendor/v8/src/safepoint-table.cc +3 -0
  366. data/vendor/v8/src/safepoint-table.h +9 -3
  367. data/vendor/v8/src/scanner-base.cc +21 -28
  368. data/vendor/v8/src/scanner-base.h +22 -11
  369. data/vendor/v8/src/scanner.cc +3 -5
  370. data/vendor/v8/src/scanner.h +4 -2
  371. data/vendor/v8/src/scopeinfo.cc +11 -16
  372. data/vendor/v8/src/scopeinfo.h +26 -15
  373. data/vendor/v8/src/scopes.cc +67 -37
  374. data/vendor/v8/src/scopes.h +26 -12
  375. data/vendor/v8/src/serialize.cc +193 -154
  376. data/vendor/v8/src/serialize.h +41 -36
  377. data/vendor/v8/src/small-pointer-list.h +163 -0
  378. data/vendor/v8/src/snapshot-common.cc +1 -1
  379. data/vendor/v8/src/snapshot.h +3 -1
  380. data/vendor/v8/src/spaces-inl.h +30 -25
  381. data/vendor/v8/src/spaces.cc +263 -370
  382. data/vendor/v8/src/spaces.h +178 -166
  383. data/vendor/v8/src/string-search.cc +4 -3
  384. data/vendor/v8/src/string-search.h +21 -20
  385. data/vendor/v8/src/string-stream.cc +32 -24
  386. data/vendor/v8/src/string.js +7 -7
  387. data/vendor/v8/src/stub-cache.cc +324 -248
  388. data/vendor/v8/src/stub-cache.h +181 -155
  389. data/vendor/v8/src/token.cc +3 -3
  390. data/vendor/v8/src/token.h +3 -3
  391. data/vendor/v8/src/top.cc +218 -390
  392. data/vendor/v8/src/type-info.cc +98 -32
  393. data/vendor/v8/src/type-info.h +10 -3
  394. data/vendor/v8/src/unicode.cc +1 -1
  395. data/vendor/v8/src/unicode.h +1 -1
  396. data/vendor/v8/src/utils.h +3 -0
  397. data/vendor/v8/src/v8-counters.cc +18 -11
  398. data/vendor/v8/src/v8-counters.h +34 -13
  399. data/vendor/v8/src/v8.cc +66 -121
  400. data/vendor/v8/src/v8.h +7 -4
  401. data/vendor/v8/src/v8globals.h +18 -12
  402. data/vendor/v8/src/{memory.h → v8memory.h} +0 -0
  403. data/vendor/v8/src/v8natives.js +59 -18
  404. data/vendor/v8/src/v8threads.cc +127 -114
  405. data/vendor/v8/src/v8threads.h +42 -35
  406. data/vendor/v8/src/v8utils.h +2 -39
  407. data/vendor/v8/src/variables.h +1 -1
  408. data/vendor/v8/src/version.cc +26 -5
  409. data/vendor/v8/src/version.h +4 -0
  410. data/vendor/v8/src/virtual-frame-heavy-inl.h +2 -4
  411. data/vendor/v8/src/virtual-frame-light-inl.h +5 -4
  412. data/vendor/v8/src/vm-state-inl.h +21 -17
  413. data/vendor/v8/src/vm-state.h +7 -5
  414. data/vendor/v8/src/win32-headers.h +1 -0
  415. data/vendor/v8/src/x64/assembler-x64-inl.h +12 -11
  416. data/vendor/v8/src/x64/assembler-x64.cc +80 -40
  417. data/vendor/v8/src/x64/assembler-x64.h +67 -17
  418. data/vendor/v8/src/x64/builtins-x64.cc +34 -33
  419. data/vendor/v8/src/x64/code-stubs-x64.cc +636 -377
  420. data/vendor/v8/src/x64/code-stubs-x64.h +14 -48
  421. data/vendor/v8/src/x64/codegen-x64-inl.h +1 -1
  422. data/vendor/v8/src/x64/codegen-x64.cc +158 -136
  423. data/vendor/v8/src/x64/codegen-x64.h +4 -1
  424. data/vendor/v8/src/x64/cpu-x64.cc +7 -5
  425. data/vendor/v8/src/x64/debug-x64.cc +8 -6
  426. data/vendor/v8/src/x64/deoptimizer-x64.cc +195 -20
  427. data/vendor/v8/src/x64/disasm-x64.cc +42 -23
  428. data/vendor/v8/src/x64/frames-x64.cc +1 -1
  429. data/vendor/v8/src/x64/frames-x64.h +2 -2
  430. data/vendor/v8/src/x64/full-codegen-x64.cc +780 -218
  431. data/vendor/v8/src/x64/ic-x64.cc +77 -79
  432. data/vendor/v8/src/x64/jump-target-x64.cc +1 -1
  433. data/vendor/v8/src/x64/lithium-codegen-x64.cc +698 -181
  434. data/vendor/v8/src/x64/lithium-codegen-x64.h +31 -6
  435. data/vendor/v8/src/x64/lithium-x64.cc +136 -54
  436. data/vendor/v8/src/x64/lithium-x64.h +142 -51
  437. data/vendor/v8/src/x64/macro-assembler-x64.cc +456 -187
  438. data/vendor/v8/src/x64/macro-assembler-x64.h +166 -34
  439. data/vendor/v8/src/x64/regexp-macro-assembler-x64.cc +44 -28
  440. data/vendor/v8/src/x64/regexp-macro-assembler-x64.h +8 -4
  441. data/vendor/v8/src/x64/register-allocator-x64-inl.h +3 -3
  442. data/vendor/v8/src/x64/register-allocator-x64.cc +12 -8
  443. data/vendor/v8/src/x64/simulator-x64.h +5 -5
  444. data/vendor/v8/src/x64/stub-cache-x64.cc +299 -344
  445. data/vendor/v8/src/x64/virtual-frame-x64.cc +37 -13
  446. data/vendor/v8/src/x64/virtual-frame-x64.h +13 -7
  447. data/vendor/v8/src/zone-inl.h +49 -3
  448. data/vendor/v8/src/zone.cc +42 -41
  449. data/vendor/v8/src/zone.h +37 -34
  450. data/vendor/v8/test/benchmarks/testcfg.py +100 -0
  451. data/vendor/v8/test/cctest/SConscript +5 -4
  452. data/vendor/v8/test/cctest/cctest.h +3 -2
  453. data/vendor/v8/test/cctest/cctest.status +6 -11
  454. data/vendor/v8/test/cctest/test-accessors.cc +3 -3
  455. data/vendor/v8/test/cctest/test-alloc.cc +39 -33
  456. data/vendor/v8/test/cctest/test-api.cc +1092 -205
  457. data/vendor/v8/test/cctest/test-assembler-arm.cc +39 -25
  458. data/vendor/v8/test/cctest/test-assembler-ia32.cc +36 -37
  459. data/vendor/v8/test/cctest/test-assembler-mips.cc +1098 -40
  460. data/vendor/v8/test/cctest/test-assembler-x64.cc +32 -25
  461. data/vendor/v8/test/cctest/test-ast.cc +1 -0
  462. data/vendor/v8/test/cctest/test-circular-queue.cc +8 -5
  463. data/vendor/v8/test/cctest/test-compiler.cc +24 -24
  464. data/vendor/v8/test/cctest/test-cpu-profiler.cc +140 -5
  465. data/vendor/v8/test/cctest/test-dataflow.cc +1 -0
  466. data/vendor/v8/test/cctest/test-debug.cc +136 -77
  467. data/vendor/v8/test/cctest/test-decls.cc +1 -1
  468. data/vendor/v8/test/cctest/test-deoptimization.cc +25 -24
  469. data/vendor/v8/test/cctest/test-disasm-arm.cc +9 -4
  470. data/vendor/v8/test/cctest/test-disasm-ia32.cc +10 -8
  471. data/vendor/v8/test/cctest/test-func-name-inference.cc +10 -4
  472. data/vendor/v8/test/cctest/test-heap-profiler.cc +226 -164
  473. data/vendor/v8/test/cctest/test-heap.cc +240 -217
  474. data/vendor/v8/test/cctest/test-liveedit.cc +1 -0
  475. data/vendor/v8/test/cctest/test-log-stack-tracer.cc +18 -20
  476. data/vendor/v8/test/cctest/test-log.cc +114 -108
  477. data/vendor/v8/test/cctest/test-macro-assembler-x64.cc +247 -177
  478. data/vendor/v8/test/cctest/test-mark-compact.cc +129 -90
  479. data/vendor/v8/test/cctest/test-parsing.cc +15 -14
  480. data/vendor/v8/test/cctest/test-platform-linux.cc +1 -0
  481. data/vendor/v8/test/cctest/test-platform-tls.cc +66 -0
  482. data/vendor/v8/test/cctest/test-platform-win32.cc +1 -0
  483. data/vendor/v8/test/cctest/test-profile-generator.cc +1 -1
  484. data/vendor/v8/test/cctest/test-regexp.cc +53 -41
  485. data/vendor/v8/test/cctest/test-reloc-info.cc +18 -11
  486. data/vendor/v8/test/cctest/test-serialize.cc +44 -43
  487. data/vendor/v8/test/cctest/test-sockets.cc +8 -3
  488. data/vendor/v8/test/cctest/test-spaces.cc +47 -29
  489. data/vendor/v8/test/cctest/test-strings.cc +20 -20
  490. data/vendor/v8/test/cctest/test-thread-termination.cc +8 -3
  491. data/vendor/v8/test/cctest/test-threads.cc +5 -3
  492. data/vendor/v8/test/cctest/test-utils.cc +5 -4
  493. data/vendor/v8/test/cctest/testcfg.py +7 -3
  494. data/vendor/v8/test/es5conform/es5conform.status +2 -77
  495. data/vendor/v8/test/es5conform/testcfg.py +1 -1
  496. data/vendor/v8/test/message/testcfg.py +1 -1
  497. data/vendor/v8/test/mjsunit/accessors-on-global-object.js +3 -3
  498. data/vendor/v8/test/mjsunit/array-concat.js +43 -1
  499. data/vendor/v8/test/mjsunit/array-join.js +25 -0
  500. data/vendor/v8/test/mjsunit/bitops-info.js +7 -1
  501. data/vendor/v8/test/mjsunit/compiler/array-length.js +2 -2
  502. data/vendor/v8/test/mjsunit/compiler/global-accessors.js +47 -0
  503. data/vendor/v8/test/mjsunit/compiler/pic.js +1 -1
  504. data/vendor/v8/test/mjsunit/compiler/regress-loadfield.js +65 -0
  505. data/vendor/v8/test/mjsunit/math-sqrt.js +5 -1
  506. data/vendor/v8/test/mjsunit/mjsunit.js +59 -8
  507. data/vendor/v8/test/mjsunit/mjsunit.status +0 -12
  508. data/vendor/v8/test/mjsunit/mul-exhaustive.js +129 -11
  509. data/vendor/v8/test/mjsunit/negate-zero.js +1 -1
  510. data/vendor/v8/test/mjsunit/object-freeze.js +5 -13
  511. data/vendor/v8/test/mjsunit/object-prevent-extensions.js +9 -50
  512. data/vendor/v8/test/mjsunit/object-seal.js +4 -13
  513. data/vendor/v8/test/mjsunit/override-eval-with-non-function.js +36 -0
  514. data/vendor/v8/test/mjsunit/regress/regress-1145.js +54 -0
  515. data/vendor/v8/test/mjsunit/regress/regress-1172-bis.js +37 -0
  516. data/vendor/v8/test/mjsunit/regress/regress-1181.js +54 -0
  517. data/vendor/v8/test/mjsunit/regress/regress-1207.js +35 -0
  518. data/vendor/v8/test/mjsunit/regress/regress-1209.js +34 -0
  519. data/vendor/v8/test/mjsunit/regress/regress-1210.js +48 -0
  520. data/vendor/v8/test/mjsunit/regress/regress-1213.js +43 -0
  521. data/vendor/v8/test/mjsunit/regress/regress-1218.js +29 -0
  522. data/vendor/v8/test/mjsunit/regress/regress-1229.js +79 -0
  523. data/vendor/v8/test/mjsunit/regress/regress-1233.js +47 -0
  524. data/vendor/v8/test/mjsunit/regress/regress-1236.js +34 -0
  525. data/vendor/v8/test/mjsunit/regress/regress-1237.js +36 -0
  526. data/vendor/v8/test/mjsunit/regress/regress-1240.js +39 -0
  527. data/vendor/v8/test/mjsunit/regress/regress-1257.js +58 -0
  528. data/vendor/v8/test/mjsunit/regress/regress-1278.js +69 -0
  529. data/vendor/v8/test/mjsunit/regress/regress-create-exception.js +1 -0
  530. data/vendor/v8/test/mjsunit/regress/regress-lazy-deopt-reloc.js +52 -0
  531. data/vendor/v8/test/mjsunit/sin-cos.js +15 -10
  532. data/vendor/v8/test/mjsunit/smi-negative-zero.js +2 -2
  533. data/vendor/v8/test/mjsunit/str-to-num.js +1 -1
  534. data/vendor/v8/test/mjsunit/strict-mode.js +435 -0
  535. data/vendor/v8/test/mjsunit/testcfg.py +23 -6
  536. data/vendor/v8/test/mozilla/mozilla.status +0 -2
  537. data/vendor/v8/test/mozilla/testcfg.py +1 -1
  538. data/vendor/v8/test/preparser/empty.js +28 -0
  539. data/vendor/v8/test/preparser/functions-only.js +38 -0
  540. data/vendor/v8/test/preparser/non-alphanum.js +34 -0
  541. data/vendor/v8/test/preparser/symbols-only.js +49 -0
  542. data/vendor/v8/test/preparser/testcfg.py +90 -0
  543. data/vendor/v8/test/sputnik/testcfg.py +1 -1
  544. data/vendor/v8/test/test262/README +16 -0
  545. data/vendor/v8/test/test262/harness-adapt.js +80 -0
  546. data/vendor/v8/test/test262/test262.status +1506 -0
  547. data/vendor/v8/test/test262/testcfg.py +123 -0
  548. data/vendor/v8/tools/freebsd-tick-processor +10 -0
  549. data/vendor/v8/tools/gyp/v8.gyp +8 -33
  550. data/vendor/v8/tools/linux-tick-processor +5 -3
  551. data/vendor/v8/tools/test.py +37 -14
  552. data/vendor/v8/tools/tickprocessor.js +22 -8
  553. data/vendor/v8/tools/visual_studio/v8_base.vcproj +13 -1
  554. data/vendor/v8/tools/visual_studio/v8_base_arm.vcproj +5 -1
  555. data/vendor/v8/tools/visual_studio/v8_base_x64.vcproj +5 -1
  556. data/vendor/v8/tools/visual_studio/x64.vsprops +1 -0
  557. metadata +1495 -1341
  558. data/ext/extconf.rb +0 -22
  559. data/ext/mustang.cpp +0 -58
  560. data/vendor/v8/src/top.h +0 -608
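Most of the churn in this release comes from upgrading the bundled V8 sources, and the heap.cc excerpt below is representative of the upgrade's central refactoring: state that used to live in class-level statics moves into per-isolate instance objects (note the new src/isolate.cc and src/isolate.h above, and the removal of src/top.h). Static declarations such as `Object* Heap::roots_[...]` become constructor-initialized fields, and static lookups such as `Counters::gc_scavenger` become `isolate_->counters()->gc_scavenger()`. The sketch below is not code from this package; it is a minimal, self-contained C++ illustration of that singleton-to-instance pattern using invented names.

    // Minimal sketch of the static-to-instance refactoring shown in the
    // heap.cc diff below. All names are illustrative, not V8's real API.
    #include <cstdio>

    class Counters {
     public:
      void IncrementGc() { ++gc_count_; }  // was a process-wide static counter
      int gc_count() const { return gc_count_; }
     private:
      int gc_count_ = 0;
    };

    class Heap {
     public:
      // Previously: `static int Heap::gc_count_;` defined at namespace scope.
      explicit Heap(Counters* counters) : counters_(counters), gc_count_(0) {}

      void CollectGarbage() {
        ++gc_count_;
        counters_->IncrementGc();  // was: Counters::gc_count.Increment();
      }
      int gc_count() const { return gc_count_; }

     private:
      Counters* counters_;  // services reached via the owner, not via statics
      int gc_count_;
    };

    // The isolate owns every formerly-global subsystem.
    class Isolate {
     public:
      Isolate() : heap_(&counters_) {}
      Heap* heap() { return &heap_; }
      Counters* counters() { return &counters_; }
     private:
      Counters counters_;  // declared before heap_, so it is built first
      Heap heap_;
    };

    int main() {
      Isolate a, b;  // two independent VMs in one process
      a.heap()->CollectGarbage();
      a.heap()->CollectGarbage();
      b.heap()->CollectGarbage();
      std::printf("a: %d GCs, b: %d GCs\n",
                  a.heap()->gc_count(), b.heap()->gc_count());
      return 0;
    }

With state reachable only through an owning object rather than through class statics, several VMs can coexist in one process, which is what the isolate work in the vendored V8 enables.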
@@ -49,119 +49,114 @@
49
49
  #include "regexp-macro-assembler.h"
50
50
  #include "arm/regexp-macro-assembler-arm.h"
51
51
  #endif
52
-
52
+ #if V8_TARGET_ARCH_MIPS && !V8_INTERPRETED_REGEXP
53
+ #include "regexp-macro-assembler.h"
54
+ #include "mips/regexp-macro-assembler-mips.h"
55
+ #endif
53
56
 
54
57
  namespace v8 {
55
58
  namespace internal {
56
59
 
57
60
 
58
- String* Heap::hidden_symbol_;
59
- Object* Heap::roots_[Heap::kRootListLength];
60
- Object* Heap::global_contexts_list_;
61
-
62
-
63
- NewSpace Heap::new_space_;
64
- OldSpace* Heap::old_pointer_space_ = NULL;
65
- OldSpace* Heap::old_data_space_ = NULL;
66
- OldSpace* Heap::code_space_ = NULL;
67
- MapSpace* Heap::map_space_ = NULL;
68
- CellSpace* Heap::cell_space_ = NULL;
69
- LargeObjectSpace* Heap::lo_space_ = NULL;
70
-
71
61
  static const intptr_t kMinimumPromotionLimit = 2 * MB;
72
62
  static const intptr_t kMinimumAllocationLimit = 8 * MB;
73
63
 
74
- intptr_t Heap::old_gen_promotion_limit_ = kMinimumPromotionLimit;
75
- intptr_t Heap::old_gen_allocation_limit_ = kMinimumAllocationLimit;
76
64
 
77
- int Heap::old_gen_exhausted_ = false;
65
+ static Mutex* gc_initializer_mutex = OS::CreateMutex();
78
66
 
79
- int Heap::amount_of_external_allocated_memory_ = 0;
80
- int Heap::amount_of_external_allocated_memory_at_last_global_gc_ = 0;
81
67
 
68
+ Heap::Heap()
69
+ : isolate_(NULL),
82
70
  // semispace_size_ should be a power of 2 and old_generation_size_ should be
83
71
  // a multiple of Page::kPageSize.
84
72
  #if defined(ANDROID)
85
- static const int default_max_semispace_size_ = 2*MB;
86
- intptr_t Heap::max_old_generation_size_ = 192*MB;
87
- int Heap::initial_semispace_size_ = 128*KB;
88
- intptr_t Heap::code_range_size_ = 0;
89
- intptr_t Heap::max_executable_size_ = max_old_generation_size_;
73
+ reserved_semispace_size_(2*MB),
74
+ max_semispace_size_(2*MB),
75
+ initial_semispace_size_(128*KB),
76
+ max_old_generation_size_(192*MB),
77
+ max_executable_size_(max_old_generation_size_),
78
+ code_range_size_(0),
90
79
  #elif defined(V8_TARGET_ARCH_X64)
91
- static const int default_max_semispace_size_ = 16*MB;
92
- intptr_t Heap::max_old_generation_size_ = 1*GB;
93
- int Heap::initial_semispace_size_ = 1*MB;
94
- intptr_t Heap::code_range_size_ = 512*MB;
95
- intptr_t Heap::max_executable_size_ = 256*MB;
96
- #else
97
- static const int default_max_semispace_size_ = 8*MB;
98
- intptr_t Heap::max_old_generation_size_ = 512*MB;
99
- int Heap::initial_semispace_size_ = 512*KB;
100
- intptr_t Heap::code_range_size_ = 0;
101
- intptr_t Heap::max_executable_size_ = 128*MB;
102
- #endif
103
-
104
- // Allow build-time customization of the max semispace size. Building
105
- // V8 with snapshots and a non-default max semispace size is much
106
- // easier if you can define it as part of the build environment.
107
- #if defined(V8_MAX_SEMISPACE_SIZE)
108
- int Heap::max_semispace_size_ = V8_MAX_SEMISPACE_SIZE;
80
+ reserved_semispace_size_(16*MB),
81
+ max_semispace_size_(16*MB),
82
+ initial_semispace_size_(1*MB),
83
+ max_old_generation_size_(1*GB),
84
+ max_executable_size_(256*MB),
85
+ code_range_size_(512*MB),
109
86
  #else
110
- int Heap::max_semispace_size_ = default_max_semispace_size_;
87
+ reserved_semispace_size_(8*MB),
88
+ max_semispace_size_(8*MB),
89
+ initial_semispace_size_(512*KB),
90
+ max_old_generation_size_(512*MB),
91
+ max_executable_size_(128*MB),
92
+ code_range_size_(0),
111
93
  #endif
112
-
113
- // The snapshot semispace size will be the default semispace size if
114
- // snapshotting is used and will be the requested semispace size as
115
- // set up by ConfigureHeap otherwise.
116
- int Heap::reserved_semispace_size_ = Heap::max_semispace_size_;
117
-
118
- List<Heap::GCPrologueCallbackPair> Heap::gc_prologue_callbacks_;
119
- List<Heap::GCEpilogueCallbackPair> Heap::gc_epilogue_callbacks_;
120
-
121
- GCCallback Heap::global_gc_prologue_callback_ = NULL;
122
- GCCallback Heap::global_gc_epilogue_callback_ = NULL;
123
- HeapObjectCallback Heap::gc_safe_size_of_old_object_ = NULL;
124
-
125
94
  // Variables set based on semispace_size_ and old_generation_size_ in
126
- // ConfigureHeap.
127
-
95
+ // ConfigureHeap (survived_since_last_expansion_, external_allocation_limit_)
128
96
  // Will be 4 * reserved_semispace_size_ to ensure that young
129
97
  // generation can be aligned to its size.
130
- int Heap::survived_since_last_expansion_ = 0;
131
- intptr_t Heap::external_allocation_limit_ = 0;
132
-
133
- Heap::HeapState Heap::gc_state_ = NOT_IN_GC;
134
-
135
- int Heap::mc_count_ = 0;
136
- int Heap::ms_count_ = 0;
137
- unsigned int Heap::gc_count_ = 0;
138
-
139
- GCTracer* Heap::tracer_ = NULL;
140
-
141
- int Heap::unflattened_strings_length_ = 0;
142
-
143
- int Heap::always_allocate_scope_depth_ = 0;
144
- int Heap::linear_allocation_scope_depth_ = 0;
145
- int Heap::contexts_disposed_ = 0;
146
-
147
- int Heap::young_survivors_after_last_gc_ = 0;
148
- int Heap::high_survival_rate_period_length_ = 0;
149
- double Heap::survival_rate_ = 0;
150
- Heap::SurvivalRateTrend Heap::previous_survival_rate_trend_ = Heap::STABLE;
151
- Heap::SurvivalRateTrend Heap::survival_rate_trend_ = Heap::STABLE;
152
-
98
+ survived_since_last_expansion_(0),
99
+ always_allocate_scope_depth_(0),
100
+ linear_allocation_scope_depth_(0),
101
+ contexts_disposed_(0),
102
+ new_space_(this),
103
+ old_pointer_space_(NULL),
104
+ old_data_space_(NULL),
105
+ code_space_(NULL),
106
+ map_space_(NULL),
107
+ cell_space_(NULL),
108
+ lo_space_(NULL),
109
+ gc_state_(NOT_IN_GC),
110
+ mc_count_(0),
111
+ ms_count_(0),
112
+ gc_count_(0),
113
+ unflattened_strings_length_(0),
153
114
  #ifdef DEBUG
154
- bool Heap::allocation_allowed_ = true;
155
-
156
- int Heap::allocation_timeout_ = 0;
157
- bool Heap::disallow_allocation_failure_ = false;
115
+ allocation_allowed_(true),
116
+ allocation_timeout_(0),
117
+ disallow_allocation_failure_(false),
118
+ debug_utils_(NULL),
158
119
  #endif // DEBUG
120
+ old_gen_promotion_limit_(kMinimumPromotionLimit),
121
+ old_gen_allocation_limit_(kMinimumAllocationLimit),
122
+ external_allocation_limit_(0),
123
+ amount_of_external_allocated_memory_(0),
124
+ amount_of_external_allocated_memory_at_last_global_gc_(0),
125
+ old_gen_exhausted_(false),
126
+ hidden_symbol_(NULL),
127
+ global_gc_prologue_callback_(NULL),
128
+ global_gc_epilogue_callback_(NULL),
129
+ gc_safe_size_of_old_object_(NULL),
130
+ tracer_(NULL),
131
+ young_survivors_after_last_gc_(0),
132
+ high_survival_rate_period_length_(0),
133
+ survival_rate_(0),
134
+ previous_survival_rate_trend_(Heap::STABLE),
135
+ survival_rate_trend_(Heap::STABLE),
136
+ max_gc_pause_(0),
137
+ max_alive_after_gc_(0),
138
+ min_in_mutator_(kMaxInt),
139
+ alive_after_last_gc_(0),
140
+ last_gc_end_timestamp_(0.0),
141
+ page_watermark_invalidated_mark_(1 << Page::WATERMARK_INVALIDATED),
142
+ number_idle_notifications_(0),
143
+ last_idle_notification_gc_count_(0),
144
+ last_idle_notification_gc_count_init_(false),
145
+ configured_(false),
146
+ is_safe_to_read_maps_(true) {
147
+ // Allow build-time customization of the max semispace size. Building
148
+ // V8 with snapshots and a non-default max semispace size is much
149
+ // easier if you can define it as part of the build environment.
150
+ #if defined(V8_MAX_SEMISPACE_SIZE)
151
+ max_semispace_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE;
152
+ #endif
153
+
154
+ memset(roots_, 0, sizeof(roots_[0]) * kRootListLength);
155
+ global_contexts_list_ = NULL;
156
+ mark_compact_collector_.heap_ = this;
157
+ external_string_table_.heap_ = this;
158
+ }
159
159
 
160
- intptr_t GCTracer::alive_after_last_gc_ = 0;
161
- double GCTracer::last_gc_end_timestamp_ = 0.0;
162
- int GCTracer::max_gc_pause_ = 0;
163
- intptr_t GCTracer::max_alive_after_gc_ = 0;
164
- int GCTracer::min_in_mutator_ = kMaxInt;
165
160
 
166
161
  intptr_t Heap::Capacity() {
167
162
  if (!HasBeenSetup()) return 0;
@@ -190,7 +185,7 @@ intptr_t Heap::CommittedMemory() {
190
185
  intptr_t Heap::CommittedMemoryExecutable() {
191
186
  if (!HasBeenSetup()) return 0;
192
187
 
193
- return MemoryAllocator::SizeExecutable();
188
+ return isolate()->memory_allocator()->SizeExecutable();
194
189
  }
195
190
 
196
191
 
@@ -217,8 +212,8 @@ bool Heap::HasBeenSetup() {
217
212
 
218
213
 
219
214
  int Heap::GcSafeSizeOfOldObject(HeapObject* object) {
220
- ASSERT(!Heap::InNewSpace(object)); // Code only works for old objects.
221
- ASSERT(!MarkCompactCollector::are_map_pointers_encoded());
215
+ ASSERT(!HEAP->InNewSpace(object)); // Code only works for old objects.
216
+ ASSERT(!HEAP->mark_compact_collector()->are_map_pointers_encoded());
222
217
  MapWord map_word = object->map_word();
223
218
  map_word.ClearMark();
224
219
  map_word.ClearOverflow();
@@ -227,8 +222,8 @@ int Heap::GcSafeSizeOfOldObject(HeapObject* object) {
227
222
 
228
223
 
229
224
  int Heap::GcSafeSizeOfOldObjectWithEncodedMap(HeapObject* object) {
230
- ASSERT(!Heap::InNewSpace(object)); // Code only works for old objects.
231
- ASSERT(MarkCompactCollector::are_map_pointers_encoded());
225
+ ASSERT(!HEAP->InNewSpace(object)); // Code only works for old objects.
226
+ ASSERT(HEAP->mark_compact_collector()->are_map_pointers_encoded());
232
227
  uint32_t marker = Memory::uint32_at(object->address());
233
228
  if (marker == MarkCompactCollector::kSingleFreeEncoding) {
234
229
  return kIntSize;
@@ -236,7 +231,7 @@ int Heap::GcSafeSizeOfOldObjectWithEncodedMap(HeapObject* object) {
236
231
  return Memory::int_at(object->address() + kIntSize);
237
232
  } else {
238
233
  MapWord map_word = object->map_word();
239
- Address map_address = map_word.DecodeMapAddress(Heap::map_space());
234
+ Address map_address = map_word.DecodeMapAddress(HEAP->map_space());
240
235
  Map* map = reinterpret_cast<Map*>(HeapObject::FromAddress(map_address));
241
236
  return object->SizeFromMap(map);
242
237
  }
@@ -246,19 +241,20 @@ int Heap::GcSafeSizeOfOldObjectWithEncodedMap(HeapObject* object) {
246
241
  GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space) {
247
242
  // Is global GC requested?
248
243
  if (space != NEW_SPACE || FLAG_gc_global) {
249
- Counters::gc_compactor_caused_by_request.Increment();
244
+ isolate_->counters()->gc_compactor_caused_by_request()->Increment();
250
245
  return MARK_COMPACTOR;
251
246
  }
252
247
 
253
248
  // Is enough data promoted to justify a global GC?
254
249
  if (OldGenerationPromotionLimitReached()) {
255
- Counters::gc_compactor_caused_by_promoted_data.Increment();
250
+ isolate_->counters()->gc_compactor_caused_by_promoted_data()->Increment();
256
251
  return MARK_COMPACTOR;
257
252
  }
258
253
 
259
254
  // Have allocation in OLD and LO failed?
260
255
  if (old_gen_exhausted_) {
261
- Counters::gc_compactor_caused_by_oldspace_exhaustion.Increment();
256
+ isolate_->counters()->
257
+ gc_compactor_caused_by_oldspace_exhaustion()->Increment();
262
258
  return MARK_COMPACTOR;
263
259
  }
264
260
 
@@ -271,8 +267,9 @@ GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space) {
271
267
  // and does not count available bytes already in the old space or code
272
268
  // space. Undercounting is safe---we may get an unrequested full GC when
273
269
  // a scavenge would have succeeded.
274
- if (MemoryAllocator::MaxAvailable() <= new_space_.Size()) {
275
- Counters::gc_compactor_caused_by_oldspace_exhaustion.Increment();
270
+ if (isolate_->memory_allocator()->MaxAvailable() <= new_space_.Size()) {
271
+ isolate_->counters()->
272
+ gc_compactor_caused_by_oldspace_exhaustion()->Increment();
276
273
  return MARK_COMPACTOR;
277
274
  }
278
275
 
@@ -317,8 +314,8 @@ void Heap::PrintShortHeapStatistics() {
317
314
  if (!FLAG_trace_gc_verbose) return;
318
315
  PrintF("Memory allocator, used: %8" V8_PTR_PREFIX "d"
319
316
  ", available: %8" V8_PTR_PREFIX "d\n",
320
- MemoryAllocator::Size(),
321
- MemoryAllocator::Available());
317
+ isolate_->memory_allocator()->Size(),
318
+ isolate_->memory_allocator()->Available());
322
319
  PrintF("New space, used: %8" V8_PTR_PREFIX "d"
323
320
  ", available: %8" V8_PTR_PREFIX "d\n",
324
321
  Heap::new_space_.Size(),
@@ -383,7 +380,7 @@ void Heap::ReportStatisticsAfterGC() {
383
380
 
384
381
 
385
382
  void Heap::GarbageCollectionPrologue() {
386
- TranscendentalCache::Clear();
383
+ isolate_->transcendental_cache()->Clear();
387
384
  ClearJSFunctionResultCaches();
388
385
  gc_count_++;
389
386
  unflattened_strings_length_ = 0;
@@ -424,21 +421,24 @@ void Heap::GarbageCollectionEpilogue() {
424
421
  Verify();
425
422
  }
426
423
 
427
- if (FLAG_print_global_handles) GlobalHandles::Print();
424
+ if (FLAG_print_global_handles) isolate_->global_handles()->Print();
428
425
  if (FLAG_print_handles) PrintHandles();
429
426
  if (FLAG_gc_verbose) Print();
430
427
  if (FLAG_code_stats) ReportCodeStatistics("After GC");
431
428
  #endif
432
429
 
433
- Counters::alive_after_last_gc.Set(static_cast<int>(SizeOfObjects()));
430
+ isolate_->counters()->alive_after_last_gc()->Set(
431
+ static_cast<int>(SizeOfObjects()));
434
432
 
435
- Counters::symbol_table_capacity.Set(symbol_table()->Capacity());
436
- Counters::number_of_symbols.Set(symbol_table()->NumberOfElements());
433
+ isolate_->counters()->symbol_table_capacity()->Set(
434
+ symbol_table()->Capacity());
435
+ isolate_->counters()->number_of_symbols()->Set(
436
+ symbol_table()->NumberOfElements());
437
437
  #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
438
438
  ReportStatisticsAfterGC();
439
439
  #endif
440
440
  #ifdef ENABLE_DEBUGGER_SUPPORT
441
- Debug::AfterGarbageCollection();
441
+ isolate_->debug()->AfterGarbageCollection();
442
442
  #endif
443
443
  }
444
444
 
@@ -447,9 +447,9 @@ void Heap::CollectAllGarbage(bool force_compaction) {
447
447
  // Since we are ignoring the return value, the exact choice of space does
448
448
  // not matter, so long as we do not specify NEW_SPACE, which would not
449
449
  // cause a full GC.
450
- MarkCompactCollector::SetForceCompaction(force_compaction);
450
+ mark_compact_collector_.SetForceCompaction(force_compaction);
451
451
  CollectGarbage(OLD_POINTER_SPACE);
452
- MarkCompactCollector::SetForceCompaction(false);
452
+ mark_compact_collector_.SetForceCompaction(false);
453
453
  }
454
454
 
455
455
 
@@ -457,7 +457,7 @@ void Heap::CollectAllAvailableGarbage() {
457
457
  // Since we are ignoring the return value, the exact choice of space does
458
458
  // not matter, so long as we do not specify NEW_SPACE, which would not
459
459
  // cause a full GC.
460
- MarkCompactCollector::SetForceCompaction(true);
460
+ mark_compact_collector()->SetForceCompaction(true);
461
461
 
462
462
  // Major GC would invoke weak handle callbacks on weakly reachable
463
463
  // handles, but won't collect weakly reachable objects until next
@@ -473,13 +473,13 @@ void Heap::CollectAllAvailableGarbage() {
473
473
  break;
474
474
  }
475
475
  }
476
- MarkCompactCollector::SetForceCompaction(false);
476
+ mark_compact_collector()->SetForceCompaction(false);
477
477
  }
478
478
 
479
479
 
480
480
  bool Heap::CollectGarbage(AllocationSpace space, GarbageCollector collector) {
481
481
  // The VM is in the GC state until exiting this function.
482
- VMState state(GC);
482
+ VMState state(isolate_, GC);
483
483
 
484
484
  #ifdef DEBUG
485
485
  // Reset the allocation timeout to the GC interval, but make sure to
@@ -492,7 +492,7 @@ bool Heap::CollectGarbage(AllocationSpace space, GarbageCollector collector) {
492
492
 
493
493
  bool next_gc_likely_to_collect_more = false;
494
494
 
495
- { GCTracer tracer;
495
+ { GCTracer tracer(this);
496
496
  GarbageCollectionPrologue();
497
497
  // The GC count was incremented in the prologue. Tell the tracer about
498
498
  // it.
@@ -502,8 +502,8 @@ bool Heap::CollectGarbage(AllocationSpace space, GarbageCollector collector) {
502
502
  tracer.set_collector(collector);
503
503
 
504
504
  HistogramTimer* rate = (collector == SCAVENGER)
505
- ? &Counters::gc_scavenger
506
- : &Counters::gc_compactor;
505
+ ? isolate_->counters()->gc_scavenger()
506
+ : isolate_->counters()->gc_compactor();
507
507
  rate->Start();
508
508
  next_gc_likely_to_collect_more =
509
509
  PerformGarbageCollection(collector, &tracer);
@@ -522,7 +522,7 @@ bool Heap::CollectGarbage(AllocationSpace space, GarbageCollector collector) {
522
522
 
523
523
 
524
524
  void Heap::PerformScavenge() {
525
- GCTracer tracer;
525
+ GCTracer tracer(this);
526
526
  PerformGarbageCollection(SCAVENGER, &tracer);
527
527
  }
528
528
 
@@ -531,7 +531,6 @@ void Heap::PerformScavenge() {
531
531
  // Helper class for verifying the symbol table.
532
532
  class SymbolTableVerifier : public ObjectVisitor {
533
533
  public:
534
- SymbolTableVerifier() { }
535
534
  void VisitPointers(Object** start, Object** end) {
536
535
  // Visit all HeapObject pointers in [start, end).
537
536
  for (Object** p = start; p < end; p++) {
@@ -548,7 +547,7 @@ class SymbolTableVerifier : public ObjectVisitor {
548
547
  static void VerifySymbolTable() {
549
548
  #ifdef DEBUG
550
549
  SymbolTableVerifier verifier;
551
- Heap::symbol_table()->IterateElements(&verifier);
550
+ HEAP->symbol_table()->IterateElements(&verifier);
552
551
  #endif // DEBUG
553
552
  }
554
553
 
@@ -633,7 +632,7 @@ void Heap::EnsureFromSpaceIsCommitted() {
633
632
 
634
633
 
635
634
  void Heap::ClearJSFunctionResultCaches() {
636
- if (Bootstrapper::IsActive()) return;
635
+ if (isolate_->bootstrapper()->IsActive()) return;
637
636
 
638
637
  Object* context = global_contexts_list_;
639
638
  while (!context->IsUndefined()) {
@@ -651,8 +650,9 @@ void Heap::ClearJSFunctionResultCaches() {
651
650
  }
652
651
 
653
652
 
653
+
654
654
  void Heap::ClearNormalizedMapCaches() {
655
- if (Bootstrapper::IsActive()) return;
655
+ if (isolate_->bootstrapper()->IsActive()) return;
656
656
 
657
657
  Object* context = global_contexts_list_;
658
658
  while (!context->IsUndefined()) {
@@ -709,7 +709,7 @@ bool Heap::PerformGarbageCollection(GarbageCollector collector,
709
709
  bool next_gc_likely_to_collect_more = false;
710
710
 
711
711
  if (collector != SCAVENGER) {
712
- PROFILE(CodeMovingGCEvent());
712
+ PROFILE(isolate_, CodeMovingGCEvent());
713
713
  }
714
714
 
715
715
  VerifySymbolTable();
@@ -768,13 +768,13 @@ bool Heap::PerformGarbageCollection(GarbageCollector collector,
768
768
  UpdateSurvivalRateTrend(start_new_space_size);
769
769
  }
770
770
 
771
- Counters::objs_since_last_young.Set(0);
771
+ isolate_->counters()->objs_since_last_young()->Set(0);
772
772
 
773
773
  if (collector == MARK_COMPACTOR) {
774
774
  DisableAssertNoAllocation allow_allocation;
775
775
  GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
776
776
  next_gc_likely_to_collect_more =
777
- GlobalHandles::PostGarbageCollectionProcessing();
777
+ isolate_->global_handles()->PostGarbageCollectionProcessing();
778
778
  }
779
779
 
780
780
  // Update relocatables.
@@ -808,11 +808,11 @@ bool Heap::PerformGarbageCollection(GarbageCollector collector,
808
808
 
809
809
  void Heap::MarkCompact(GCTracer* tracer) {
810
810
  gc_state_ = MARK_COMPACT;
811
- LOG(ResourceEvent("markcompact", "begin"));
811
+ LOG(isolate_, ResourceEvent("markcompact", "begin"));
812
812
 
813
- MarkCompactCollector::Prepare(tracer);
813
+ mark_compact_collector_.Prepare(tracer);
814
814
 
815
- bool is_compacting = MarkCompactCollector::IsCompacting();
815
+ bool is_compacting = mark_compact_collector_.IsCompacting();
816
816
 
817
817
  if (is_compacting) {
818
818
  mc_count_++;
@@ -823,15 +823,17 @@ void Heap::MarkCompact(GCTracer* tracer) {
823
823
 
824
824
  MarkCompactPrologue(is_compacting);
825
825
 
826
- MarkCompactCollector::CollectGarbage();
826
+ is_safe_to_read_maps_ = false;
827
+ mark_compact_collector_.CollectGarbage();
828
+ is_safe_to_read_maps_ = true;
827
829
 
828
- LOG(ResourceEvent("markcompact", "end"));
830
+ LOG(isolate_, ResourceEvent("markcompact", "end"));
829
831
 
830
832
  gc_state_ = NOT_IN_GC;
831
833
 
832
834
  Shrink();
833
835
 
834
- Counters::objs_since_last_full.Set(0);
836
+ isolate_->counters()->objs_since_last_full()->Set(0);
835
837
 
836
838
  contexts_disposed_ = 0;
837
839
  }
@@ -840,13 +842,11 @@ void Heap::MarkCompact(GCTracer* tracer) {
840
842
  void Heap::MarkCompactPrologue(bool is_compacting) {
841
843
  // At any old GC clear the keyed lookup cache to enable collection of unused
842
844
  // maps.
843
- KeyedLookupCache::Clear();
844
- ContextSlotCache::Clear();
845
- DescriptorLookupCache::Clear();
846
-
847
- RuntimeProfiler::MarkCompactPrologue(is_compacting);
845
+ isolate_->keyed_lookup_cache()->Clear();
846
+ isolate_->context_slot_cache()->Clear();
847
+ isolate_->descriptor_lookup_cache()->Clear();
848
848
 
849
- CompilationCache::MarkCompactPrologue();
849
+ isolate_->compilation_cache()->MarkCompactPrologue();
850
850
 
851
851
  CompletelyClearInstanceofCache();
852
852
 
@@ -870,6 +870,7 @@ Object* Heap::FindCodeObject(Address a) {
870
870
  // Helper class for copying HeapObjects
871
871
  class ScavengeVisitor: public ObjectVisitor {
872
872
  public:
873
+ explicit ScavengeVisitor(Heap* heap) : heap_(heap) {}
873
874
 
874
875
  void VisitPointer(Object** p) { ScavengePointer(p); }
875
876
 
@@ -881,48 +882,15 @@ class ScavengeVisitor: public ObjectVisitor {
881
882
  private:
882
883
  void ScavengePointer(Object** p) {
883
884
  Object* object = *p;
884
- if (!Heap::InNewSpace(object)) return;
885
+ if (!heap_->InNewSpace(object)) return;
885
886
  Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p),
886
887
  reinterpret_cast<HeapObject*>(object));
887
888
  }
888
- };
889
-
890
-
891
- // A queue of objects promoted during scavenge. Each object is accompanied
892
- // by it's size to avoid dereferencing a map pointer for scanning.
893
- class PromotionQueue {
894
- public:
895
- void Initialize(Address start_address) {
896
- front_ = rear_ = reinterpret_cast<intptr_t*>(start_address);
897
- }
898
889
 
899
- bool is_empty() { return front_ <= rear_; }
900
-
901
- void insert(HeapObject* target, int size) {
902
- *(--rear_) = reinterpret_cast<intptr_t>(target);
903
- *(--rear_) = size;
904
- // Assert no overflow into live objects.
905
- ASSERT(reinterpret_cast<Address>(rear_) >= Heap::new_space()->top());
906
- }
907
-
908
- void remove(HeapObject** target, int* size) {
909
- *target = reinterpret_cast<HeapObject*>(*(--front_));
910
- *size = static_cast<int>(*(--front_));
911
- // Assert no underflow.
912
- ASSERT(front_ >= rear_);
913
- }
914
-
915
- private:
916
- // The front of the queue is higher in memory than the rear.
917
- intptr_t* front_;
918
- intptr_t* rear_;
890
+ Heap* heap_;
919
891
  };
920
892
 
921
893
 
922
- // Shared state read by the scavenge collector and set by ScavengeObject.
923
- static PromotionQueue promotion_queue;
924
-
925
-
926
894
  #ifdef DEBUG
927
895
  // Visitor class to verify pointers in code or data space do not point into
928
896
  // new space.
@@ -931,7 +899,7 @@ class VerifyNonPointerSpacePointersVisitor: public ObjectVisitor {
931
899
  void VisitPointers(Object** start, Object**end) {
932
900
  for (Object** current = start; current < end; current++) {
933
901
  if ((*current)->IsHeapObject()) {
934
- ASSERT(!Heap::InNewSpace(HeapObject::cast(*current)));
902
+ ASSERT(!HEAP->InNewSpace(HeapObject::cast(*current)));
935
903
  }
936
904
  }
937
905
  }
@@ -942,12 +910,12 @@ static void VerifyNonPointerSpacePointers() {
942
910
  // Verify that there are no pointers to new space in spaces where we
943
911
  // do not expect them.
944
912
  VerifyNonPointerSpacePointersVisitor v;
945
- HeapObjectIterator code_it(Heap::code_space());
913
+ HeapObjectIterator code_it(HEAP->code_space());
946
914
  for (HeapObject* object = code_it.next();
947
915
  object != NULL; object = code_it.next())
948
916
  object->Iterate(&v);
949
917
 
950
- HeapObjectIterator data_it(Heap::old_data_space());
918
+ HeapObjectIterator data_it(HEAP->old_data_space());
951
919
  for (HeapObject* object = data_it.next();
952
920
  object != NULL; object = data_it.next())
953
921
  object->Iterate(&v);
@@ -973,7 +941,9 @@ void Heap::Scavenge() {
973
941
 
974
942
  gc_state_ = SCAVENGE;
975
943
 
976
- Page::FlipMeaningOfInvalidatedWatermarkFlag();
944
+ SwitchScavengingVisitorsTableIfProfilingWasEnabled();
945
+
946
+ Page::FlipMeaningOfInvalidatedWatermarkFlag(this);
977
947
  #ifdef DEBUG
978
948
  VerifyPageWatermarkValidity(old_pointer_space_, ALL_VALID);
979
949
  VerifyPageWatermarkValidity(map_space_, ALL_VALID);
@@ -988,10 +958,10 @@ void Heap::Scavenge() {
988
958
  map_space_->FlushTopPageWatermark();
989
959
 
990
960
  // Implements Cheney's copying algorithm
991
- LOG(ResourceEvent("scavenge", "begin"));
961
+ LOG(isolate_, ResourceEvent("scavenge", "begin"));
992
962
 
993
963
  // Clear descriptor cache.
994
- DescriptorLookupCache::Clear();
964
+ isolate_->descriptor_lookup_cache()->Clear();
995
965
 
996
966
  // Used for updating survived_since_last_expansion_ at function end.
997
967
  intptr_t survived_watermark = PromotedSpaceSize();
@@ -1021,16 +991,17 @@ void Heap::Scavenge() {
1021
991
  // frees up its size in bytes from the top of the new space, and
1022
992
  // objects are at least one pointer in size.
1023
993
  Address new_space_front = new_space_.ToSpaceLow();
1024
- promotion_queue.Initialize(new_space_.ToSpaceHigh());
994
+ promotion_queue_.Initialize(new_space_.ToSpaceHigh());
1025
995
 
1026
- ScavengeVisitor scavenge_visitor;
996
+ is_safe_to_read_maps_ = false;
997
+ ScavengeVisitor scavenge_visitor(this);
1027
998
  // Copy roots.
1028
999
  IterateRoots(&scavenge_visitor, VISIT_ALL_IN_SCAVENGE);
1029
1000
 
1030
1001
  // Copy objects reachable from the old generation. By definition,
1031
1002
  // there are no intergenerational pointers in code or data spaces.
1032
1003
  IterateDirtyRegions(old_pointer_space_,
1033
- &IteratePointersInDirtyRegion,
1004
+ &Heap::IteratePointersInDirtyRegion,
1034
1005
  &ScavengePointer,
1035
1006
  WATERMARK_CAN_BE_INVALID);
1036
1007
 
@@ -1056,23 +1027,18 @@ void Heap::Scavenge() {
1056
1027
  // Scavenge object reachable from the global contexts list directly.
1057
1028
  scavenge_visitor.VisitPointer(BitCast<Object**>(&global_contexts_list_));
1058
1029
 
1059
- // Scavenge objects reachable from the runtime-profiler sampler
1060
- // window directly.
1061
- Object** sampler_window_address = RuntimeProfiler::SamplerWindowAddress();
1062
- int sampler_window_size = RuntimeProfiler::SamplerWindowSize();
1063
- scavenge_visitor.VisitPointers(
1064
- sampler_window_address,
1065
- sampler_window_address + sampler_window_size);
1066
-
1067
1030
  new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
1068
1031
 
1069
1032
  UpdateNewSpaceReferencesInExternalStringTable(
1070
1033
  &UpdateNewSpaceReferenceInExternalStringTableEntry);
1071
1034
 
1072
1035
  LiveObjectList::UpdateReferencesForScavengeGC();
1036
+ isolate()->runtime_profiler()->UpdateSamplesAfterScavenge();
1073
1037
 
1074
1038
  ASSERT(new_space_front == new_space_.top());
1075
1039
 
1040
+ is_safe_to_read_maps_ = true;
1041
+
1076
1042
  // Set age mark.
1077
1043
  new_space_.set_age_mark(new_space_.top());
1078
1044
 
@@ -1080,18 +1046,19 @@ void Heap::Scavenge() {
  IncrementYoungSurvivorsCounter(static_cast<int>(
  (PromotedSpaceSize() - survived_watermark) + new_space_.Size()));

- LOG(ResourceEvent("scavenge", "end"));
+ LOG(isolate_, ResourceEvent("scavenge", "end"));

  gc_state_ = NOT_IN_GC;
  }


- String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Object** p) {
+ String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap,
+ Object** p) {
  MapWord first_word = HeapObject::cast(*p)->map_word();

  if (!first_word.IsForwardingAddress()) {
  // Unreachable external string can be finalized.
- FinalizeExternalString(String::cast(*p));
+ heap->FinalizeExternalString(String::cast(*p));
  return NULL;
  }

@@ -1102,48 +1069,49 @@ String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Object** p) {

  void Heap::UpdateNewSpaceReferencesInExternalStringTable(
  ExternalStringTableUpdaterCallback updater_func) {
- ExternalStringTable::Verify();
+ external_string_table_.Verify();

- if (ExternalStringTable::new_space_strings_.is_empty()) return;
+ if (external_string_table_.new_space_strings_.is_empty()) return;

- Object** start = &ExternalStringTable::new_space_strings_[0];
- Object** end = start + ExternalStringTable::new_space_strings_.length();
+ Object** start = &external_string_table_.new_space_strings_[0];
+ Object** end = start + external_string_table_.new_space_strings_.length();
  Object** last = start;

  for (Object** p = start; p < end; ++p) {
- ASSERT(Heap::InFromSpace(*p));
- String* target = updater_func(p);
+ ASSERT(InFromSpace(*p));
+ String* target = updater_func(this, p);

  if (target == NULL) continue;

  ASSERT(target->IsExternalString());

- if (Heap::InNewSpace(target)) {
+ if (InNewSpace(target)) {
  // String is still in new space. Update the table entry.
  *last = target;
  ++last;
  } else {
  // String got promoted. Move it to the old string list.
- ExternalStringTable::AddOldString(target);
+ external_string_table_.AddOldString(target);
  }
  }

  ASSERT(last <= end);
- ExternalStringTable::ShrinkNewStrings(static_cast<int>(last - start));
+ external_string_table_.ShrinkNewStrings(static_cast<int>(last - start));
  }


- static Object* ProcessFunctionWeakReferences(Object* function,
+ static Object* ProcessFunctionWeakReferences(Heap* heap,
+ Object* function,
  WeakObjectRetainer* retainer) {
- Object* head = Heap::undefined_value();
+ Object* head = heap->undefined_value();
  JSFunction* tail = NULL;
  Object* candidate = function;
- while (!candidate->IsUndefined()) {
+ while (candidate != heap->undefined_value()) {
  // Check whether to keep the candidate in the list.
  JSFunction* candidate_function = reinterpret_cast<JSFunction*>(candidate);
  Object* retain = retainer->RetainAs(candidate);
  if (retain != NULL) {
- if (head->IsUndefined()) {
+ if (head == heap->undefined_value()) {
  // First element in the list.
  head = candidate_function;
  } else {
@@ -1160,7 +1128,7 @@ static Object* ProcessFunctionWeakReferences(Object* function,

  // Terminate the list if there is one or more elements.
  if (tail != NULL) {
- tail->set_next_function_link(Heap::undefined_value());
+ tail->set_next_function_link(heap->undefined_value());
  }

  return head;
@@ -1171,18 +1139,19 @@ void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
  Object* head = undefined_value();
  Context* tail = NULL;
  Object* candidate = global_contexts_list_;
- while (!candidate->IsUndefined()) {
+ while (candidate != undefined_value()) {
  // Check whether to keep the candidate in the list.
  Context* candidate_context = reinterpret_cast<Context*>(candidate);
  Object* retain = retainer->RetainAs(candidate);
  if (retain != NULL) {
- if (head->IsUndefined()) {
+ if (head == undefined_value()) {
  // First element in the list.
  head = candidate_context;
  } else {
  // Subsequent elements in the list.
  ASSERT(tail != NULL);
- tail->set_unchecked(Context::NEXT_CONTEXT_LINK,
+ tail->set_unchecked(this,
+ Context::NEXT_CONTEXT_LINK,
  candidate_context,
  UPDATE_WRITE_BARRIER);
  }
@@ -1192,9 +1161,11 @@ void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
  // Process the weak list of optimized functions for the context.
  Object* function_list_head =
  ProcessFunctionWeakReferences(
+ this,
  candidate_context->get(Context::OPTIMIZED_FUNCTIONS_LIST),
  retainer);
- candidate_context->set_unchecked(Context::OPTIMIZED_FUNCTIONS_LIST,
+ candidate_context->set_unchecked(this,
+ Context::OPTIMIZED_FUNCTIONS_LIST,
  function_list_head,
  UPDATE_WRITE_BARRIER);
  }
@@ -1204,21 +1175,22 @@ void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {

  // Terminate the list if there is one or more elements.
  if (tail != NULL) {
- tail->set_unchecked(Context::NEXT_CONTEXT_LINK,
+ tail->set_unchecked(this,
+ Context::NEXT_CONTEXT_LINK,
  Heap::undefined_value(),
  UPDATE_WRITE_BARRIER);
  }

  // Update the head of the list of contexts.
- Heap::global_contexts_list_ = head;
+ global_contexts_list_ = head;
  }


  class NewSpaceScavenger : public StaticNewSpaceVisitor<NewSpaceScavenger> {
  public:
- static inline void VisitPointer(Object** p) {
+ static inline void VisitPointer(Heap* heap, Object** p) {
  Object* object = *p;
- if (!Heap::InNewSpace(object)) return;
+ if (!heap->InNewSpace(object)) return;
  Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p),
  reinterpret_cast<HeapObject*>(object));
  }
@@ -1239,10 +1211,10 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
  }

  // Promote and process all the to-be-promoted objects.
- while (!promotion_queue.is_empty()) {
+ while (!promotion_queue_.is_empty()) {
  HeapObject* target;
  int size;
- promotion_queue.remove(&target, &size);
+ promotion_queue_.remove(&target, &size);

  // Promoted object might be already partially visited
  // during dirty regions iteration. Thus we search specifically
@@ -1262,6 +1234,32 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
  }


+ enum LoggingAndProfiling {
+ LOGGING_AND_PROFILING_ENABLED,
+ LOGGING_AND_PROFILING_DISABLED
+ };
+
+
+ typedef void (*ScavengingCallback)(Map* map,
+ HeapObject** slot,
+ HeapObject* object);
+
+
+ static Atomic32 scavenging_visitors_table_mode_;
+ static VisitorDispatchTable<ScavengingCallback> scavenging_visitors_table_;
+
+
+ INLINE(static void DoScavengeObject(Map* map,
+ HeapObject** slot,
+ HeapObject* obj));
+
+
+ void DoScavengeObject(Map* map, HeapObject** slot, HeapObject* obj) {
+ scavenging_visitors_table_.GetVisitor(map)(map, slot, obj);
+ }
+
+
+ template<LoggingAndProfiling logging_and_profiling_mode>
  class ScavengingVisitor : public StaticVisitorBase {
  public:
  static void Initialize() {
@@ -1270,23 +1268,22 @@ class ScavengingVisitor : public StaticVisitorBase {
  table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate);
  table_.Register(kVisitByteArray, &EvacuateByteArray);
  table_.Register(kVisitFixedArray, &EvacuateFixedArray);
+
  table_.Register(kVisitGlobalContext,
  &ObjectEvacuationStrategy<POINTER_OBJECT>::
- VisitSpecialized<Context::kSize>);
-
- typedef ObjectEvacuationStrategy<POINTER_OBJECT> PointerObject;
+ template VisitSpecialized<Context::kSize>);

  table_.Register(kVisitConsString,
  &ObjectEvacuationStrategy<POINTER_OBJECT>::
- VisitSpecialized<ConsString::kSize>);
+ template VisitSpecialized<ConsString::kSize>);

  table_.Register(kVisitSharedFunctionInfo,
  &ObjectEvacuationStrategy<POINTER_OBJECT>::
- VisitSpecialized<SharedFunctionInfo::kSize>);
+ template VisitSpecialized<SharedFunctionInfo::kSize>);

  table_.Register(kVisitJSFunction,
  &ObjectEvacuationStrategy<POINTER_OBJECT>::
- VisitSpecialized<JSFunction::kSize>);
+ template VisitSpecialized<JSFunction::kSize>);

  table_.RegisterSpecializations<ObjectEvacuationStrategy<DATA_OBJECT>,
  kVisitDataObject,
@@ -1301,18 +1298,16 @@ class ScavengingVisitor : public StaticVisitorBase {
  kVisitStructGeneric>();
  }

-
- static inline void Scavenge(Map* map, HeapObject** slot, HeapObject* obj) {
- table_.GetVisitor(map)(map, slot, obj);
+ static VisitorDispatchTable<ScavengingCallback>* GetTable() {
+ return &table_;
  }

-
  private:
  enum ObjectContents { DATA_OBJECT, POINTER_OBJECT };
  enum SizeRestriction { SMALL, UNKNOWN_SIZE };

  #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
- static void RecordCopiedObject(HeapObject* obj) {
+ static void RecordCopiedObject(Heap* heap, HeapObject* obj) {
  bool should_record = false;
  #ifdef DEBUG
  should_record = FLAG_heap_stats;
@@ -1321,10 +1316,10 @@ class ScavengingVisitor : public StaticVisitorBase {
  should_record = should_record || FLAG_log_gc;
  #endif
  if (should_record) {
- if (Heap::new_space()->Contains(obj)) {
- Heap::new_space()->RecordAllocation(obj);
+ if (heap->new_space()->Contains(obj)) {
+ heap->new_space()->RecordAllocation(obj);
  } else {
- Heap::new_space()->RecordPromotion(obj);
+ heap->new_space()->RecordPromotion(obj);
  }
  }
  }
@@ -1333,27 +1328,34 @@ class ScavengingVisitor : public StaticVisitorBase {
  // Helper function used by CopyObject to copy a source object to an
  // allocated target object and update the forwarding pointer in the source
  // object. Returns the target object.
- INLINE(static HeapObject* MigrateObject(HeapObject* source,
+ INLINE(static HeapObject* MigrateObject(Heap* heap,
+ HeapObject* source,
  HeapObject* target,
  int size)) {
  // Copy the content of source to target.
- Heap::CopyBlock(target->address(), source->address(), size);
+ heap->CopyBlock(target->address(), source->address(), size);

  // Set the forwarding address.
  source->set_map_word(MapWord::FromForwardingAddress(target));

+ if (logging_and_profiling_mode == LOGGING_AND_PROFILING_ENABLED) {
  #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
- // Update NewSpace stats if necessary.
- RecordCopiedObject(target);
+ // Update NewSpace stats if necessary.
+ RecordCopiedObject(heap, target);
  #endif
- HEAP_PROFILE(ObjectMoveEvent(source->address(), target->address()));
+ HEAP_PROFILE(heap, ObjectMoveEvent(source->address(), target->address()));
  #if defined(ENABLE_LOGGING_AND_PROFILING)
- if (Logger::is_logging() || CpuProfiler::is_profiling()) {
- if (target->IsSharedFunctionInfo()) {
- PROFILE(SFIMoveEvent(source->address(), target->address()));
+ Isolate* isolate = heap->isolate();
+ if (isolate->logger()->is_logging() ||
+ isolate->cpu_profiler()->is_profiling()) {
+ if (target->IsSharedFunctionInfo()) {
+ PROFILE(isolate, SharedFunctionInfoMoveEvent(
+ source->address(), target->address()));
+ }
  }
- }
  #endif
+ }
+
  return target;
  }

@@ -1367,36 +1369,37 @@ class ScavengingVisitor : public StaticVisitorBase {
  (object_size <= Page::kMaxHeapObjectSize));
  ASSERT(object->Size() == object_size);

- if (Heap::ShouldBePromoted(object->address(), object_size)) {
+ Heap* heap = map->heap();
+ if (heap->ShouldBePromoted(object->address(), object_size)) {
  MaybeObject* maybe_result;

  if ((size_restriction != SMALL) &&
  (object_size > Page::kMaxHeapObjectSize)) {
- maybe_result = Heap::lo_space()->AllocateRawFixedArray(object_size);
+ maybe_result = heap->lo_space()->AllocateRawFixedArray(object_size);
  } else {
  if (object_contents == DATA_OBJECT) {
- maybe_result = Heap::old_data_space()->AllocateRaw(object_size);
+ maybe_result = heap->old_data_space()->AllocateRaw(object_size);
  } else {
- maybe_result = Heap::old_pointer_space()->AllocateRaw(object_size);
+ maybe_result = heap->old_pointer_space()->AllocateRaw(object_size);
  }
  }

  Object* result = NULL; // Initialization to please compiler.
  if (maybe_result->ToObject(&result)) {
  HeapObject* target = HeapObject::cast(result);
- *slot = MigrateObject(object, target, object_size);
+ *slot = MigrateObject(heap, object , target, object_size);

  if (object_contents == POINTER_OBJECT) {
- promotion_queue.insert(target, object_size);
+ heap->promotion_queue()->insert(target, object_size);
  }

- Heap::tracer()->increment_promoted_objects_size(object_size);
+ heap->tracer()->increment_promoted_objects_size(object_size);
  return;
  }
  }
  Object* result =
- Heap::new_space()->AllocateRaw(object_size)->ToObjectUnchecked();
- *slot = MigrateObject(object, HeapObject::cast(result), object_size);
+ heap->new_space()->AllocateRaw(object_size)->ToObjectUnchecked();
+ *slot = MigrateObject(heap, object, HeapObject::cast(result), object_size);
  return;
  }

@@ -1447,13 +1450,14 @@ class ScavengingVisitor : public StaticVisitorBase {
  HeapObject* object) {
  ASSERT(IsShortcutCandidate(map->instance_type()));

- if (ConsString::cast(object)->unchecked_second() == Heap::empty_string()) {
+ if (ConsString::cast(object)->unchecked_second() ==
+ map->heap()->empty_string()) {
  HeapObject* first =
  HeapObject::cast(ConsString::cast(object)->unchecked_first());

  *slot = first;

- if (!Heap::InNewSpace(first)) {
+ if (!map->heap()->InNewSpace(first)) {
  object->set_map_word(MapWord::FromForwardingAddress(first));
  return;
  }
@@ -1467,7 +1471,7 @@ class ScavengingVisitor : public StaticVisitorBase {
  return;
  }

- Scavenge(first->map(), slot, first);
+ DoScavengeObject(first->map(), slot, first);
  object->set_map_word(MapWord::FromForwardingAddress(*slot));
  return;
  }
@@ -1494,26 +1498,59 @@ class ScavengingVisitor : public StaticVisitorBase {
  }
  };

- typedef void (*Callback)(Map* map, HeapObject** slot, HeapObject* object);
-
- static VisitorDispatchTable<Callback> table_;
+ static VisitorDispatchTable<ScavengingCallback> table_;
  };


- VisitorDispatchTable<ScavengingVisitor::Callback> ScavengingVisitor::table_;
+ template<LoggingAndProfiling logging_and_profiling_mode>
+ VisitorDispatchTable<ScavengingCallback>
+ ScavengingVisitor<logging_and_profiling_mode>::table_;
+
+
+ static void InitializeScavengingVisitorsTables() {
+ ScavengingVisitor<LOGGING_AND_PROFILING_DISABLED>::Initialize();
+ ScavengingVisitor<LOGGING_AND_PROFILING_ENABLED>::Initialize();
+ scavenging_visitors_table_.CopyFrom(
+ ScavengingVisitor<LOGGING_AND_PROFILING_DISABLED>::GetTable());
+ scavenging_visitors_table_mode_ = LOGGING_AND_PROFILING_DISABLED;
+ }
+
+
+ void Heap::SwitchScavengingVisitorsTableIfProfilingWasEnabled() {
+ if (scavenging_visitors_table_mode_ == LOGGING_AND_PROFILING_ENABLED) {
+ // Table was already updated by some isolate.
+ return;
+ }
+
+ if (isolate()->logger()->is_logging() ||
+ isolate()->cpu_profiler()->is_profiling() ||
+ (isolate()->heap_profiler() != NULL &&
+ isolate()->heap_profiler()->is_profiling())) {
+ // If one of the isolates is doing scavenge at this moment of time
+ // it might see this table in an inconsistent state when
+ // some of the callbacks point to
+ // ScavengingVisitor<LOGGING_AND_PROFILING_ENABLED> and others
+ // to ScavengingVisitor<LOGGING_AND_PROFILING_DISABLED>.
+ // However this does not lead to any bugs as such isolate does not have
+ // profiling enabled and any isolate with enabled profiling is guaranteed
+ // to see the table in the consistent state.
+ scavenging_visitors_table_.CopyFrom(
+ ScavengingVisitor<LOGGING_AND_PROFILING_ENABLED>::GetTable());
+
+ // We use Release_Store to prevent reordering of this write before writes
+ // to the table.
+ Release_Store(&scavenging_visitors_table_mode_,
+ LOGGING_AND_PROFILING_ENABLED);
+ }
+ }


  void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
- ASSERT(InFromSpace(object));
+ ASSERT(HEAP->InFromSpace(object));
  MapWord first_word = object->map_word();
  ASSERT(!first_word.IsForwardingAddress());
  Map* map = first_word.ToMap();
- ScavengingVisitor::Scavenge(map, p, object);
- }
-
-
- void Heap::ScavengePointer(HeapObject** p) {
- ScavengeObject(p, *p);
+ DoScavengeObject(map, p, object);
  }

@@ -1528,9 +1565,8 @@ MaybeObject* Heap::AllocatePartialMap(InstanceType instance_type,
  reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map());
  reinterpret_cast<Map*>(result)->set_instance_type(instance_type);
  reinterpret_cast<Map*>(result)->set_instance_size(instance_size);
- reinterpret_cast<Map*>(result)->
- set_visitor_id(
- StaticVisitorBase::GetVisitorId(instance_type, instance_size));
+ reinterpret_cast<Map*>(result)->set_visitor_id(
+ StaticVisitorBase::GetVisitorId(instance_type, instance_size));
  reinterpret_cast<Map*>(result)->set_inobject_properties(0);
  reinterpret_cast<Map*>(result)->set_pre_allocated_property_fields(0);
  reinterpret_cast<Map*>(result)->set_unused_property_fields(0);
@@ -1639,6 +1675,7 @@ bool Heap::CreateInitialMaps() {
  if (!maybe_obj->ToObject(&obj)) return false;
  }
  set_null_value(obj);
+ Oddball::cast(obj)->set_kind(Oddball::kNull);

  // Allocate the empty descriptor array.
  { MaybeObject* maybe_obj = AllocateEmptyFixedArray();
@@ -1716,10 +1753,10 @@ bool Heap::CreateInitialMaps() {
  set_empty_byte_array(ByteArray::cast(obj));

  { MaybeObject* maybe_obj =
- AllocateMap(PIXEL_ARRAY_TYPE, PixelArray::kAlignedSize);
+ AllocateMap(EXTERNAL_PIXEL_ARRAY_TYPE, ExternalArray::kAlignedSize);
  if (!maybe_obj->ToObject(&obj)) return false;
  }
- set_pixel_array_map(Map::cast(obj));
+ set_external_pixel_array_map(Map::cast(obj));

  { MaybeObject* maybe_obj = AllocateMap(EXTERNAL_BYTE_ARRAY_TYPE,
  ExternalArray::kAlignedSize);
@@ -1830,7 +1867,7 @@ bool Heap::CreateInitialMaps() {
  }
  set_message_object_map(Map::cast(obj));

- ASSERT(!Heap::InNewSpace(Heap::empty_fixed_array()));
+ ASSERT(!InNewSpace(empty_fixed_array()));
  return true;
  }

@@ -1883,12 +1920,13 @@ MaybeObject* Heap::AllocateJSGlobalPropertyCell(Object* value) {


  MaybeObject* Heap::CreateOddball(const char* to_string,
- Object* to_number) {
+ Object* to_number,
+ byte kind) {
  Object* result;
  { MaybeObject* maybe_result = Allocate(oddball_map(), OLD_DATA_SPACE);
  if (!maybe_result->ToObject(&result)) return maybe_result;
  }
- return Oddball::cast(result)->Initialize(to_string, to_number);
+ return Oddball::cast(result)->Initialize(to_string, to_number, kind);
  }


@@ -1900,7 +1938,7 @@ bool Heap::CreateApiObjects() {
  }
  set_neander_map(Map::cast(obj));

- { MaybeObject* maybe_obj = Heap::AllocateJSObjectFromMap(neander_map());
+ { MaybeObject* maybe_obj = AllocateJSObjectFromMap(neander_map());
  if (!maybe_obj->ToObject(&obj)) return false;
  }
  Object* elements;
@@ -1915,20 +1953,6 @@ bool Heap::CreateApiObjects() {
  }


- void Heap::CreateCEntryStub() {
- CEntryStub stub(1);
- set_c_entry_code(*stub.GetCode());
- }
-
-
- #if V8_TARGET_ARCH_ARM && !V8_INTERPRETED_REGEXP
- void Heap::CreateRegExpCEntryStub() {
- RegExpCEntryStub stub;
- set_re_c_entry_code(*stub.GetCode());
- }
- #endif
-
-
  void Heap::CreateJSEntryStub() {
  JSEntryStub stub;
  set_js_entry_code(*stub.GetCode());
@@ -1941,14 +1965,6 @@ void Heap::CreateJSConstructEntryStub() {
  }


- #if V8_TARGET_ARCH_ARM
- void Heap::CreateDirectCEntryStub() {
- DirectCEntryStub stub;
- set_direct_c_entry_code(*stub.GetCode());
- }
- #endif
-
-
  void Heap::CreateFixedStubs() {
  // Here we create roots for fixed stubs. They are needed at GC
  // for cooking and uncooking (check out frames.cc).
@@ -1956,22 +1972,15 @@ void Heap::CreateFixedStubs() {
  // stub cache for these stubs.
  HandleScope scope;
  // gcc-4.4 has problem generating correct code of following snippet:
- // { CEntryStub stub;
- // c_entry_code_ = *stub.GetCode();
+ // { JSEntryStub stub;
+ // js_entry_code_ = *stub.GetCode();
  // }
- // { DebuggerStatementStub stub;
- // debugger_statement_code_ = *stub.GetCode();
+ // { JSConstructEntryStub stub;
+ // js_construct_entry_code_ = *stub.GetCode();
  // }
  // To workaround the problem, make separate functions without inlining.
- Heap::CreateCEntryStub();
  Heap::CreateJSEntryStub();
  Heap::CreateJSConstructEntryStub();
- #if V8_TARGET_ARCH_ARM && !V8_INTERPRETED_REGEXP
- Heap::CreateRegExpCEntryStub();
- #endif
- #if V8_TARGET_ARCH_ARM
- Heap::CreateDirectCEntryStub();
- #endif
  }


@@ -1994,6 +2003,7 @@ bool Heap::CreateInitialObjects() {
  if (!maybe_obj->ToObject(&obj)) return false;
  }
  set_undefined_value(obj);
+ Oddball::cast(obj)->set_kind(Oddball::kUndefined);
  ASSERT(!InNewSpace(undefined_value()));

  // Allocate initial symbol table.
@@ -2013,39 +2023,50 @@ bool Heap::CreateInitialObjects() {

  // Allocate the null_value
  { MaybeObject* maybe_obj =
- Oddball::cast(null_value())->Initialize("null", Smi::FromInt(0));
+ Oddball::cast(null_value())->Initialize("null",
+ Smi::FromInt(0),
+ Oddball::kNull);
  if (!maybe_obj->ToObject(&obj)) return false;
  }

- { MaybeObject* maybe_obj = CreateOddball("true", Smi::FromInt(1));
+ { MaybeObject* maybe_obj = CreateOddball("true",
+ Smi::FromInt(1),
+ Oddball::kTrue);
  if (!maybe_obj->ToObject(&obj)) return false;
  }
  set_true_value(obj);

- { MaybeObject* maybe_obj = CreateOddball("false", Smi::FromInt(0));
+ { MaybeObject* maybe_obj = CreateOddball("false",
+ Smi::FromInt(0),
+ Oddball::kFalse);
  if (!maybe_obj->ToObject(&obj)) return false;
  }
  set_false_value(obj);

- { MaybeObject* maybe_obj = CreateOddball("hole", Smi::FromInt(-1));
+ { MaybeObject* maybe_obj = CreateOddball("hole",
+ Smi::FromInt(-1),
+ Oddball::kTheHole);
  if (!maybe_obj->ToObject(&obj)) return false;
  }
  set_the_hole_value(obj);

  { MaybeObject* maybe_obj = CreateOddball("arguments_marker",
- Smi::FromInt(-4));
+ Smi::FromInt(-4),
+ Oddball::kArgumentMarker);
  if (!maybe_obj->ToObject(&obj)) return false;
  }
  set_arguments_marker(obj);

- { MaybeObject* maybe_obj =
- CreateOddball("no_interceptor_result_sentinel", Smi::FromInt(-2));
+ { MaybeObject* maybe_obj = CreateOddball("no_interceptor_result_sentinel",
+ Smi::FromInt(-2),
+ Oddball::kOther);
  if (!maybe_obj->ToObject(&obj)) return false;
  }
  set_no_interceptor_result_sentinel(obj);

- { MaybeObject* maybe_obj =
- CreateOddball("termination_exception", Smi::FromInt(-3));
+ { MaybeObject* maybe_obj = CreateOddball("termination_exception",
+ Smi::FromInt(-3),
+ Oddball::kOther);
  if (!maybe_obj->ToObject(&obj)) return false;
  }
  set_termination_exception(obj);
@@ -2107,7 +2128,8 @@ bool Heap::CreateInitialObjects() {
  { MaybeObject* maybe_obj = StringDictionary::Allocate(Runtime::kNumFunctions);
  if (!maybe_obj->ToObject(&obj)) return false;
  }
- { MaybeObject* maybe_obj = Runtime::InitializeIntrinsicFunctionNames(obj);
+ { MaybeObject* maybe_obj = Runtime::InitializeIntrinsicFunctionNames(this,
+ obj);
  if (!maybe_obj->ToObject(&obj)) return false;
  }
  set_intrinsic_function_names(StringDictionary::cast(obj));
@@ -2127,20 +2149,20 @@ bool Heap::CreateInitialObjects() {
  }
  set_natives_source_cache(FixedArray::cast(obj));

- // Handling of script id generation is in Factory::NewScript.
+ // Handling of script id generation is in FACTORY->NewScript.
  set_last_script_id(undefined_value());

  // Initialize keyed lookup cache.
- KeyedLookupCache::Clear();
+ isolate_->keyed_lookup_cache()->Clear();

  // Initialize context slot cache.
- ContextSlotCache::Clear();
+ isolate_->context_slot_cache()->Clear();

  // Initialize descriptor cache.
- DescriptorLookupCache::Clear();
+ isolate_->descriptor_lookup_cache()->Clear();

  // Initialize compilation cache.
- CompilationCache::Clear();
+ isolate_->compilation_cache()->Clear();

  return true;
  }
@@ -2164,7 +2186,7 @@ void Heap::FlushNumberStringCache() {
  // Flush the number to string cache.
  int len = number_string_cache()->length();
  for (int i = 0; i < len; i++) {
- number_string_cache()->set_undefined(i);
+ number_string_cache()->set_undefined(this, i);
  }
  }

@@ -2216,7 +2238,7 @@ void Heap::SetNumberStringCache(Object* number, String* string) {

  MaybeObject* Heap::NumberToString(Object* number,
  bool check_number_string_cache) {
- Counters::number_to_string_runtime.Increment();
+ isolate_->counters()->number_to_string_runtime()->Increment();
  if (check_number_string_cache) {
  Object* cached = GetNumberStringCache(number);
  if (cached != undefined_value()) {
@@ -2266,6 +2288,8 @@ Heap::RootListIndex Heap::RootIndexForExternalArrayType(
  return kExternalUnsignedIntArrayMapRootIndex;
  case kExternalFloatArray:
  return kExternalFloatArrayMapRootIndex;
+ case kExternalPixelArray:
+ return kExternalPixelArrayMapRootIndex;
  default:
  UNREACHABLE();
  return kUndefinedValueRootIndex;
@@ -2317,10 +2341,11 @@ MaybeObject* Heap::AllocateSharedFunctionInfo(Object* name) {

  SharedFunctionInfo* share = SharedFunctionInfo::cast(result);
  share->set_name(name);
- Code* illegal = Builtins::builtin(Builtins::Illegal);
+ Code* illegal = isolate_->builtins()->builtin(Builtins::kIllegal);
  share->set_code(illegal);
  share->set_scope_info(SerializedScopeInfo::Empty());
- Code* construct_stub = Builtins::builtin(Builtins::JSConstructStubGeneric);
+ Code* construct_stub = isolate_->builtins()->builtin(
+ Builtins::kJSConstructStubGeneric);
  share->set_construct_stub(construct_stub);
  share->set_expected_nof_properties(0);
  share->set_length(0);
@@ -2378,20 +2403,21 @@ static inline bool Between(uint32_t character, uint32_t from, uint32_t to) {


  MUST_USE_RESULT static inline MaybeObject* MakeOrFindTwoCharacterString(
+ Heap* heap,
  uint32_t c1,
  uint32_t c2) {
  String* symbol;
  // Numeric strings have a different hash algorithm not known by
  // LookupTwoCharsSymbolIfExists, so we skip this step for such strings.
  if ((!Between(c1, '0', '9') || !Between(c2, '0', '9')) &&
- Heap::symbol_table()->LookupTwoCharsSymbolIfExists(c1, c2, &symbol)) {
+ heap->symbol_table()->LookupTwoCharsSymbolIfExists(c1, c2, &symbol)) {
  return symbol;
  // Now we know the length is 2, we might as well make use of that fact
  // when building the new string.
  } else if ((c1 | c2) <= String::kMaxAsciiCharCodeU) { // We can do this
  ASSERT(IsPowerOf2(String::kMaxAsciiCharCodeU + 1)); // because of this.
  Object* result;
- { MaybeObject* maybe_result = Heap::AllocateRawAsciiString(2);
+ { MaybeObject* maybe_result = heap->AllocateRawAsciiString(2);
  if (!maybe_result->ToObject(&result)) return maybe_result;
  }
  char* dest = SeqAsciiString::cast(result)->GetChars();
@@ -2400,7 +2426,7 @@ MUST_USE_RESULT static inline MaybeObject* MakeOrFindTwoCharacterString(
  return result;
  } else {
  Object* result;
- { MaybeObject* maybe_result = Heap::AllocateRawTwoByteString(2);
+ { MaybeObject* maybe_result = heap->AllocateRawTwoByteString(2);
  if (!maybe_result->ToObject(&result)) return maybe_result;
  }
  uc16* dest = SeqTwoByteString::cast(result)->GetChars();
@@ -2430,7 +2456,7 @@ MaybeObject* Heap::AllocateConsString(String* first, String* second) {
  if (length == 2) {
  unsigned c1 = first->Get(0);
  unsigned c2 = second->Get(0);
- return MakeOrFindTwoCharacterString(c1, c2);
+ return MakeOrFindTwoCharacterString(this, c1, c2);
  }

  bool first_is_ascii = first->IsAsciiRepresentation();
@@ -2440,7 +2466,7 @@ MaybeObject* Heap::AllocateConsString(String* first, String* second) {
  // Make sure that an out of memory exception is thrown if the length
  // of the new cons string is too large.
  if (length > String::kMaxLength || length < 0) {
- Top::context()->mark_out_of_memory();
+ isolate()->context()->mark_out_of_memory();
  return Failure::OutOfMemoryException();
  }

@@ -2452,7 +2478,7 @@ MaybeObject* Heap::AllocateConsString(String* first, String* second) {
  is_ascii_data_in_two_byte_string =
  first->HasOnlyAsciiChars() && second->HasOnlyAsciiChars();
  if (is_ascii_data_in_two_byte_string) {
- Counters::string_add_runtime_ext_to_ascii.Increment();
+ isolate_->counters()->string_add_runtime_ext_to_ascii()->Increment();
  }
  }

@@ -2493,6 +2519,7 @@ MaybeObject* Heap::AllocateConsString(String* first, String* second) {
  char* dest = SeqAsciiString::cast(result)->GetChars();
  String::WriteToFlat(first, dest, 0, first_length);
  String::WriteToFlat(second, dest + first_length, 0, second_length);
+ isolate_->counters()->string_add_runtime_ext_to_ascii()->Increment();
  return result;
  }

@@ -2534,15 +2561,14 @@ MaybeObject* Heap::AllocateSubString(String* buffer,
  int length = end - start;

  if (length == 1) {
- return Heap::LookupSingleCharacterStringFromCode(
- buffer->Get(start));
+ return LookupSingleCharacterStringFromCode(buffer->Get(start));
  } else if (length == 2) {
  // Optimization for 2-byte strings often used as keys in a decompression
  // dictionary. Check whether we already have the string in the symbol
  // table to prevent creation of many unnecessary strings.
  unsigned c1 = buffer->Get(start);
  unsigned c2 = buffer->Get(start + 1);
- return MakeOrFindTwoCharacterString(c1, c2);
+ return MakeOrFindTwoCharacterString(this, c1, c2);
  }

  // Make an attempt to flatten the buffer to reduce access time.
@@ -2574,7 +2600,7 @@ MaybeObject* Heap::AllocateExternalStringFromAscii(
  ExternalAsciiString::Resource* resource) {
  size_t length = resource->length();
  if (length > static_cast<size_t>(String::kMaxLength)) {
- Top::context()->mark_out_of_memory();
+ isolate()->context()->mark_out_of_memory();
  return Failure::OutOfMemoryException();
  }

@@ -2597,7 +2623,7 @@ MaybeObject* Heap::AllocateExternalStringFromTwoByte(
  ExternalTwoByteString::Resource* resource) {
  size_t length = resource->length();
  if (length > static_cast<size_t>(String::kMaxLength)) {
- Top::context()->mark_out_of_memory();
+ isolate()->context()->mark_out_of_memory();
  return Failure::OutOfMemoryException();
  }

@@ -2607,7 +2633,7 @@ MaybeObject* Heap::AllocateExternalStringFromTwoByte(
  bool is_ascii = length <= kAsciiCheckLengthLimit &&
  String::IsAscii(resource->data(), static_cast<int>(length));
  Map* map = is_ascii ?
- Heap::external_string_with_ascii_data_map() : Heap::external_string_map();
+ external_string_with_ascii_data_map() : external_string_map();
  Object* result;
  { MaybeObject* maybe_result = Allocate(map, NEW_SPACE);
  if (!maybe_result->ToObject(&result)) return maybe_result;
@@ -2624,8 +2650,8 @@ MaybeObject* Heap::AllocateExternalStringFromTwoByte(

  MaybeObject* Heap::LookupSingleCharacterStringFromCode(uint16_t code) {
  if (code <= String::kMaxAsciiCharCode) {
- Object* value = Heap::single_character_string_cache()->get(code);
- if (value != Heap::undefined_value()) return value;
+ Object* value = single_character_string_cache()->get(code);
+ if (value != undefined_value()) return value;

  char buffer[1];
  buffer[0] = static_cast<char>(code);
@@ -2633,12 +2659,12 @@ MaybeObject* Heap::LookupSingleCharacterStringFromCode(uint16_t code) {
  MaybeObject* maybe_result = LookupSymbol(Vector<const char>(buffer, 1));

  if (!maybe_result->ToObject(&result)) return maybe_result;
- Heap::single_character_string_cache()->set(code, result);
+ single_character_string_cache()->set(code, result);
  return result;
  }

  Object* result;
- { MaybeObject* maybe_result = Heap::AllocateRawTwoByteString(1);
+ { MaybeObject* maybe_result = AllocateRawTwoByteString(1);
  if (!maybe_result->ToObject(&result)) return maybe_result;
  }
  String* answer = String::cast(result);
@@ -2700,24 +2726,6 @@ void Heap::CreateFillerObjectAt(Address addr, int size) {
  }


- MaybeObject* Heap::AllocatePixelArray(int length,
- uint8_t* external_pointer,
- PretenureFlag pretenure) {
- AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
- Object* result;
- { MaybeObject* maybe_result =
- AllocateRaw(PixelArray::kAlignedSize, space, OLD_DATA_SPACE);
- if (!maybe_result->ToObject(&result)) return maybe_result;
- }
-
- reinterpret_cast<PixelArray*>(result)->set_map(pixel_array_map());
- reinterpret_cast<PixelArray*>(result)->set_length(length);
- reinterpret_cast<PixelArray*>(result)->set_external_pointer(external_pointer);
-
- return result;
- }
-
-
  MaybeObject* Heap::AllocateExternalArray(int length,
  ExternalArrayType array_type,
  void* external_pointer,
@@ -2742,7 +2750,8 @@ MaybeObject* Heap::AllocateExternalArray(int length,

  MaybeObject* Heap::CreateCode(const CodeDesc& desc,
  Code::Flags flags,
- Handle<Object> self_reference) {
+ Handle<Object> self_reference,
+ bool immovable) {
  // Allocate ByteArray before the Code object, so that we do not risk
  // leaving uninitialized Code object (and breaking the heap).
  Object* reloc_info;
@@ -2750,12 +2759,14 @@ MaybeObject* Heap::CreateCode(const CodeDesc& desc,
  if (!maybe_reloc_info->ToObject(&reloc_info)) return maybe_reloc_info;
  }

- // Compute size
+ // Compute size.
  int body_size = RoundUp(desc.instr_size, kObjectAlignment);
  int obj_size = Code::SizeFor(body_size);
  ASSERT(IsAligned(static_cast<intptr_t>(obj_size), kCodeAlignment));
  MaybeObject* maybe_result;
- if (obj_size > MaxObjectSizeInPagedSpace()) {
+ // Large code objects and code objects which should stay at a fixed address
+ // are allocated in large object space.
+ if (obj_size > MaxObjectSizeInPagedSpace() || immovable) {
  maybe_result = lo_space_->AllocateRawCode(obj_size);
  } else {
  maybe_result = code_space_->AllocateRaw(obj_size);
@@ -2767,7 +2778,8 @@ MaybeObject* Heap::CreateCode(const CodeDesc& desc,
  // Initialize the object
  HeapObject::cast(result)->set_map(code_map());
  Code* code = Code::cast(result);
- ASSERT(!CodeRange::exists() || CodeRange::contains(code->address()));
+ ASSERT(!isolate_->code_range()->exists() ||
+ isolate_->code_range()->contains(code->address()));
  code->set_instruction_size(desc.instr_size);
  code->set_relocation_info(ByteArray::cast(reloc_info));
  code->set_flags(flags);
@@ -2813,7 +2825,8 @@ MaybeObject* Heap::CopyCode(Code* code) {
  CopyBlock(new_addr, old_addr, obj_size);
  // Relocate the copy.
  Code* new_code = Code::cast(result);
- ASSERT(!CodeRange::exists() || CodeRange::contains(code->address()));
+ ASSERT(!isolate_->code_range()->exists() ||
+ isolate_->code_range()->contains(code->address()));
  new_code->Relocate(new_addr - old_addr);
  return new_code;
  }
@@ -2862,7 +2875,8 @@ MaybeObject* Heap::CopyCode(Code* code, Vector<byte> reloc_info) {
  memcpy(new_code->relocation_start(), reloc_info.start(), reloc_info.length());

  // Relocate the copy.
- ASSERT(!CodeRange::exists() || CodeRange::contains(code->address()));
+ ASSERT(!isolate_->code_range()->exists() ||
+ isolate_->code_range()->contains(code->address()));
  new_code->Relocate(new_addr - old_addr);

  #ifdef DEBUG
@@ -2886,7 +2900,7 @@ MaybeObject* Heap::Allocate(Map* map, AllocationSpace space) {
  }
  HeapObject::cast(result)->set_map(map);
  #ifdef ENABLE_LOGGING_AND_PROFILING
- ProducerHeapProfile::RecordJSObjectAllocation(result);
+ isolate_->producer_heap_profile()->RecordJSObjectAllocation(result);
  #endif
  return result;
  }
@@ -2948,22 +2962,34 @@ MaybeObject* Heap::AllocateArgumentsObject(Object* callee, int length) {
  // To get fast allocation and map sharing for arguments objects we
  // allocate them based on an arguments boilerplate.

+ JSObject* boilerplate;
+ int arguments_object_size;
+ bool strict_mode_callee = callee->IsJSFunction() &&
+ JSFunction::cast(callee)->shared()->strict_mode();
+ if (strict_mode_callee) {
+ boilerplate =
+ isolate()->context()->global_context()->
+ strict_mode_arguments_boilerplate();
+ arguments_object_size = kArgumentsObjectSizeStrict;
+ } else {
+ boilerplate =
+ isolate()->context()->global_context()->arguments_boilerplate();
+ arguments_object_size = kArgumentsObjectSize;
+ }
+
  // This calls Copy directly rather than using Heap::AllocateRaw so we
  // duplicate the check here.
  ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);

- JSObject* boilerplate =
- Top::context()->global_context()->arguments_boilerplate();
-
  // Check that the size of the boilerplate matches our
  // expectations. The ArgumentsAccessStub::GenerateNewObject relies
  // on the size being a known constant.
- ASSERT(kArgumentsObjectSize == boilerplate->map()->instance_size());
+ ASSERT(arguments_object_size == boilerplate->map()->instance_size());

  // Do the allocation.
  Object* result;
  { MaybeObject* maybe_result =
- AllocateRaw(kArgumentsObjectSize, NEW_SPACE, OLD_POINTER_SPACE);
+ AllocateRaw(arguments_object_size, NEW_SPACE, OLD_POINTER_SPACE);
  if (!maybe_result->ToObject(&result)) return maybe_result;
  }

@@ -2972,14 +2998,17 @@ MaybeObject* Heap::AllocateArgumentsObject(Object* callee, int length) {
  // barrier here.
  CopyBlock(HeapObject::cast(result)->address(),
  boilerplate->address(),
- kArgumentsObjectSize);
+ JSObject::kHeaderSize);

- // Set the two properties.
- JSObject::cast(result)->InObjectPropertyAtPut(arguments_callee_index,
- callee);
- JSObject::cast(result)->InObjectPropertyAtPut(arguments_length_index,
+ // Set the length property.
+ JSObject::cast(result)->InObjectPropertyAtPut(kArgumentsLengthIndex,
  Smi::FromInt(length),
  SKIP_WRITE_BARRIER);
+ // Set the callee property for non-strict mode arguments object only.
+ if (!strict_mode_callee) {
+ JSObject::cast(result)->InObjectPropertyAtPut(kArgumentsCalleeIndex,
+ callee);
+ }

  // Check the state of the object
  ASSERT(JSObject::cast(result)->HasFastProperties());
@@ -3011,8 +3040,7 @@ MaybeObject* Heap::AllocateInitialMap(JSFunction* fun) {
  int instance_size = fun->shared()->CalculateInstanceSize();
  int in_object_properties = fun->shared()->CalculateInObjectProperties();
  Object* map_obj;
- { MaybeObject* maybe_map_obj =
- Heap::AllocateMap(JS_OBJECT_TYPE, instance_size);
+ { MaybeObject* maybe_map_obj = AllocateMap(JS_OBJECT_TYPE, instance_size);
  if (!maybe_map_obj->ToObject(&map_obj)) return maybe_map_obj;
  }

@@ -3208,7 +3236,7 @@ MaybeObject* Heap::AllocateGlobalObject(JSFunction* constructor) {
  PropertyDetails d =
  PropertyDetails(details.attributes(), CALLBACKS, details.index());
  Object* value = descs->GetCallbacksObject(i);
- { MaybeObject* maybe_value = Heap::AllocateJSGlobalPropertyCell(value);
+ { MaybeObject* maybe_value = AllocateJSGlobalPropertyCell(value);
  if (!maybe_value->ToObject(&value)) return maybe_value;
  }

@@ -3234,7 +3262,7 @@ MaybeObject* Heap::AllocateGlobalObject(JSFunction* constructor) {

  // Setup the global object as a normalized object.
  global->set_map(new_map);
- global->map()->set_instance_descriptors(Heap::empty_descriptor_array());
+ global->map()->set_instance_descriptors(empty_descriptor_array());
  global->set_properties(dictionary);

  // Make sure result is a global object with properties in dictionary.
@@ -3273,7 +3301,7 @@ MaybeObject* Heap::CopyJSObject(JSObject* source) {
  { MaybeObject* maybe_clone = new_space_.AllocateRaw(object_size);
  if (!maybe_clone->ToObject(&clone)) return maybe_clone;
  }
- ASSERT(Heap::InNewSpace(clone));
+ ASSERT(InNewSpace(clone));
  // Since we know the clone is allocated in new space, we can copy
  // the contents without worrying about updating the write barrier.
  CopyBlock(HeapObject::cast(clone)->address(),
@@ -3303,7 +3331,7 @@ MaybeObject* Heap::CopyJSObject(JSObject* source) {
  }
  // Return the new clone.
  #ifdef ENABLE_LOGGING_AND_PROFILING
- ProducerHeapProfile::RecordJSObjectAllocation(clone);
+ isolate_->producer_heap_profile()->RecordJSObjectAllocation(clone);
  #endif
  return clone;
  }
@@ -3359,7 +3387,7 @@ MaybeObject* Heap::AllocateStringFromUtf8Slow(Vector<const char> string,
  // Count the number of characters in the UTF-8 string and check if
  // it is an ASCII string.
  Access<ScannerConstants::Utf8Decoder>
- decoder(ScannerConstants::utf8_decoder());
+ decoder(isolate_->scanner_constants()->utf8_decoder());
  decoder->Reset(string.start(), string.length());
  int chars = 0;
  while (decoder->has_more()) {
@@ -3412,12 +3440,24 @@ Map* Heap::SymbolMapForString(String* string) {

  // Find the corresponding symbol map for strings.
  Map* map = string->map();
- if (map == ascii_string_map()) return ascii_symbol_map();
- if (map == string_map()) return symbol_map();
- if (map == cons_string_map()) return cons_symbol_map();
- if (map == cons_ascii_string_map()) return cons_ascii_symbol_map();
- if (map == external_string_map()) return external_symbol_map();
- if (map == external_ascii_string_map()) return external_ascii_symbol_map();
+ if (map == ascii_string_map()) {
+ return ascii_symbol_map();
+ }
+ if (map == string_map()) {
+ return symbol_map();
+ }
+ if (map == cons_string_map()) {
+ return cons_symbol_map();
+ }
+ if (map == cons_ascii_string_map()) {
+ return cons_ascii_symbol_map();
+ }
+ if (map == external_string_map()) {
+ return external_symbol_map();
+ }
+ if (map == external_ascii_string_map()) {
+ return external_ascii_symbol_map();
+ }
  if (map == external_string_with_ascii_data_map()) {
  return external_symbol_with_ascii_data_map();
  }
@@ -3591,7 +3631,7 @@ MaybeObject* Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) {
  { MaybeObject* maybe_obj = AllocateRawFixedArray(len);
  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }
- if (Heap::InNewSpace(obj)) {
+ if (InNewSpace(obj)) {
  HeapObject* dst = HeapObject::cast(obj);
  dst->set_map(map);
  CopyBlock(dst->address() + kPointerSize,
@@ -3623,7 +3663,7 @@ MaybeObject* Heap::AllocateFixedArray(int length) {
  array->set_map(fixed_array_map());
  array->set_length(length);
  // Initialize body.
- ASSERT(!Heap::InNewSpace(undefined_value()));
+ ASSERT(!InNewSpace(undefined_value()));
  MemsetPointer(array->data_start(), undefined_value(), length);
  return result;
  }
@@ -3654,20 +3694,21 @@ MaybeObject* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) {


  MUST_USE_RESULT static MaybeObject* AllocateFixedArrayWithFiller(
+ Heap* heap,
  int length,
  PretenureFlag pretenure,
  Object* filler) {
  ASSERT(length >= 0);
- ASSERT(Heap::empty_fixed_array()->IsFixedArray());
- if (length == 0) return Heap::empty_fixed_array();
+ ASSERT(heap->empty_fixed_array()->IsFixedArray());
+ if (length == 0) return heap->empty_fixed_array();

- ASSERT(!Heap::InNewSpace(filler));
+ ASSERT(!heap->InNewSpace(filler));
  Object* result;
- { MaybeObject* maybe_result = Heap::AllocateRawFixedArray(length, pretenure);
+ { MaybeObject* maybe_result = heap->AllocateRawFixedArray(length, pretenure);
  if (!maybe_result->ToObject(&result)) return maybe_result;
  }

- HeapObject::cast(result)->set_map(Heap::fixed_array_map());
+ HeapObject::cast(result)->set_map(heap->fixed_array_map());
  FixedArray* array = FixedArray::cast(result);
  array->set_length(length);
  MemsetPointer(array->data_start(), filler, length);
@@ -3676,13 +3717,19 @@ MUST_USE_RESULT static MaybeObject* AllocateFixedArrayWithFiller(


  MaybeObject* Heap::AllocateFixedArray(int length, PretenureFlag pretenure) {
- return AllocateFixedArrayWithFiller(length, pretenure, undefined_value());
+ return AllocateFixedArrayWithFiller(this,
+ length,
+ pretenure,
+ undefined_value());
  }


  MaybeObject* Heap::AllocateFixedArrayWithHoles(int length,
  PretenureFlag pretenure) {
- return AllocateFixedArrayWithFiller(length, pretenure, the_hole_value());
+ return AllocateFixedArrayWithFiller(this,
+ length,
+ pretenure,
+ the_hole_value());
  }


@@ -3702,7 +3749,7 @@ MaybeObject* Heap::AllocateUninitializedFixedArray(int length) {

  MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) {
  Object* result;
- { MaybeObject* maybe_result = Heap::AllocateFixedArray(length, pretenure);
+ { MaybeObject* maybe_result = AllocateFixedArray(length, pretenure);
  if (!maybe_result->ToObject(&result)) return maybe_result;
  }
  reinterpret_cast<HeapObject*>(result)->set_map(hash_table_map());
@@ -3714,7 +3761,7 @@ MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) {
  MaybeObject* Heap::AllocateGlobalContext() {
  Object* result;
  { MaybeObject* maybe_result =
- Heap::AllocateFixedArray(Context::GLOBAL_CONTEXT_SLOTS);
+ AllocateFixedArray(Context::GLOBAL_CONTEXT_SLOTS);
  if (!maybe_result->ToObject(&result)) return maybe_result;
  }
  Context* context = reinterpret_cast<Context*>(result);
@@ -3728,7 +3775,7 @@ MaybeObject* Heap::AllocateGlobalContext() {
  MaybeObject* Heap::AllocateFunctionContext(int length, JSFunction* function) {
  ASSERT(length >= Context::MIN_CONTEXT_SLOTS);
  Object* result;
- { MaybeObject* maybe_result = Heap::AllocateFixedArray(length);
+ { MaybeObject* maybe_result = AllocateFixedArray(length);
  if (!maybe_result->ToObject(&result)) return maybe_result;
  }
  Context* context = reinterpret_cast<Context*>(result);
@@ -3749,12 +3796,12 @@ MaybeObject* Heap::AllocateWithContext(Context* previous,
  JSObject* extension,
  bool is_catch_context) {
  Object* result;
- { MaybeObject* maybe_result =
- Heap::AllocateFixedArray(Context::MIN_CONTEXT_SLOTS);
+ { MaybeObject* maybe_result = AllocateFixedArray(Context::MIN_CONTEXT_SLOTS);
  if (!maybe_result->ToObject(&result)) return maybe_result;
  }
  Context* context = reinterpret_cast<Context*>(result);
- context->set_map(is_catch_context ? catch_context_map() : context_map());
+ context->set_map(is_catch_context ? catch_context_map() :
+ context_map());
  context->set_closure(previous->closure());
  context->set_fcontext(previous->fcontext());
  context->set_previous(previous);
@@ -3770,7 +3817,8 @@ MaybeObject* Heap::AllocateWithContext(Context* previous,
  MaybeObject* Heap::AllocateStruct(InstanceType type) {
  Map* map;
  switch (type) {
- #define MAKE_CASE(NAME, Name, name) case NAME##_TYPE: map = name##_map(); break;
+ #define MAKE_CASE(NAME, Name, name) \
+ case NAME##_TYPE: map = name##_map(); break;
  STRUCT_LIST(MAKE_CASE)
  #undef MAKE_CASE
  default:
@@ -3781,7 +3829,7 @@ STRUCT_LIST(MAKE_CASE)
  AllocationSpace space =
  (size > MaxObjectSizeInPagedSpace()) ? LO_SPACE : OLD_POINTER_SPACE;
  Object* result;
- { MaybeObject* maybe_result = Heap::Allocate(map, space);
+ { MaybeObject* maybe_result = Allocate(map, space);
  if (!maybe_result->ToObject(&result)) return maybe_result;
  }
  Struct::cast(result)->InitializeBody(size);
@@ -3795,8 +3843,11 @@ bool Heap::IdleNotification() {
  static const int kIdlesBeforeMarkCompact = 8;
  static const int kMaxIdleCount = kIdlesBeforeMarkCompact + 1;
  static const unsigned int kGCsBetweenCleanup = 4;
- static int number_idle_notifications = 0;
- static unsigned int last_gc_count = gc_count_;
+
+ if (!last_idle_notification_gc_count_init_) {
+ last_idle_notification_gc_count_ = gc_count_;
+ last_idle_notification_gc_count_init_ = true;
+ }

  bool uncommit = true;
  bool finished = false;
@@ -3805,56 +3856,56 @@ bool Heap::IdleNotification() {
  // GCs have taken place. This allows another round of cleanup based
  // on idle notifications if enough work has been carried out to
  // provoke a number of garbage collections.
- if (gc_count_ - last_gc_count < kGCsBetweenCleanup) {
- number_idle_notifications =
- Min(number_idle_notifications + 1, kMaxIdleCount);
+ if (gc_count_ - last_idle_notification_gc_count_ < kGCsBetweenCleanup) {
+ number_idle_notifications_ =
+ Min(number_idle_notifications_ + 1, kMaxIdleCount);
  } else {
- number_idle_notifications = 0;
- last_gc_count = gc_count_;
+ number_idle_notifications_ = 0;
+ last_idle_notification_gc_count_ = gc_count_;
  }

- if (number_idle_notifications == kIdlesBeforeScavenge) {
+ if (number_idle_notifications_ == kIdlesBeforeScavenge) {
  if (contexts_disposed_ > 0) {
- HistogramTimerScope scope(&Counters::gc_context);
+ HistogramTimerScope scope(isolate_->counters()->gc_context());
  CollectAllGarbage(false);
  } else {
  CollectGarbage(NEW_SPACE);
  }
  new_space_.Shrink();
- last_gc_count = gc_count_;
- } else if (number_idle_notifications == kIdlesBeforeMarkSweep) {
+ last_idle_notification_gc_count_ = gc_count_;
+ } else if (number_idle_notifications_ == kIdlesBeforeMarkSweep) {
  // Before doing the mark-sweep collections we clear the
  // compilation cache to avoid hanging on to source code and
  // generated code for cached functions.
- CompilationCache::Clear();
+ isolate_->compilation_cache()->Clear();

  CollectAllGarbage(false);
  new_space_.Shrink();
- last_gc_count = gc_count_;
+ last_idle_notification_gc_count_ = gc_count_;

- } else if (number_idle_notifications == kIdlesBeforeMarkCompact) {
+ } else if (number_idle_notifications_ == kIdlesBeforeMarkCompact) {
  CollectAllGarbage(true);
  new_space_.Shrink();
- last_gc_count = gc_count_;
+ last_idle_notification_gc_count_ = gc_count_;
+ number_idle_notifications_ = 0;
  finished = true;
-
  } else if (contexts_disposed_ > 0) {
  if (FLAG_expose_gc) {
  contexts_disposed_ = 0;
  } else {
- HistogramTimerScope scope(&Counters::gc_context);
+ HistogramTimerScope scope(isolate_->counters()->gc_context());
  CollectAllGarbage(false);
- last_gc_count = gc_count_;
+ last_idle_notification_gc_count_ = gc_count_;
  }
  // If this is the first idle notification, we reset the
  // notification count to avoid letting idle notifications for
  // context disposal garbage collections start a potentially too
  // aggressive idle GC cycle.
- if (number_idle_notifications <= 1) {
- number_idle_notifications = 0;
+ if (number_idle_notifications_ <= 1) {
+ number_idle_notifications_ = 0;
  uncommit = false;
  }
- } else if (number_idle_notifications > kIdlesBeforeMarkCompact) {
+ } else if (number_idle_notifications_ > kIdlesBeforeMarkCompact) {
  // If we have received more than kIdlesBeforeMarkCompact idle
  // notifications we do not perform any cleanup because we don't
  // expect to gain much by doing so.
@@ -3864,7 +3915,7 @@ bool Heap::IdleNotification() {
3864
3915
  // Make sure that we have no pending context disposals and
3865
3916
  // conditionally uncommit from space.
3866
3917
  ASSERT(contexts_disposed_ == 0);
3867
- if (uncommit) Heap::UncommitFromSpace();
3918
+ if (uncommit) UncommitFromSpace();
3868
3919
  return finished;
3869
3920
  }
3870
3921
 
@@ -3873,7 +3924,7 @@ bool Heap::IdleNotification() {
3873
3924
 
3874
3925
  void Heap::Print() {
3875
3926
  if (!HasBeenSetup()) return;
3876
- Top::PrintStack();
3927
+ isolate()->PrintStack();
3877
3928
  AllSpaces spaces;
3878
3929
  for (Space* space = spaces.next(); space != NULL; space = spaces.next())
3879
3930
  space->Print();
@@ -3906,11 +3957,11 @@ void Heap::ReportHeapStatistics(const char* title) {
3906
3957
 
3907
3958
  PrintF("\n");
3908
3959
  PrintF("Number of handles : %d\n", HandleScope::NumberOfHandles());
3909
- GlobalHandles::PrintStats();
3960
+ isolate_->global_handles()->PrintStats();
3910
3961
  PrintF("\n");
3911
3962
 
3912
3963
  PrintF("Heap statistics : ");
3913
- MemoryAllocator::ReportStatistics();
3964
+ isolate_->memory_allocator()->ReportStatistics();
3914
3965
  PrintF("To space : ");
3915
3966
  new_space_.ReportStatistics();
3916
3967
  PrintF("Old pointer space : ");
@@ -3993,7 +4044,7 @@ static void VerifyPointersUnderWatermark(
  Address start = page->ObjectAreaStart();
  Address end = page->AllocationWatermark();

- Heap::IterateDirtyRegions(Page::kAllRegionsDirtyMarks,
+ HEAP->IterateDirtyRegions(Page::kAllRegionsDirtyMarks,
  start,
  end,
  visit_dirty_region,
@@ -4014,7 +4065,7 @@ static void VerifyPointersUnderWatermark(LargeObjectSpace* space) {
  // When we are not in GC the Heap::InNewSpace() predicate
  // checks that pointers which satisfy predicate point into
  // the active semispace.
- Heap::InNewSpace(*slot);
+ HEAP->InNewSpace(*slot);
  slot_address += kPointerSize;
  }
  }
@@ -4135,7 +4186,8 @@ void Heap::ZapFromSpace() {
  #endif // DEBUG


- bool Heap::IteratePointersInDirtyRegion(Address start,
+ bool Heap::IteratePointersInDirtyRegion(Heap* heap,
+ Address start,
  Address end,
  ObjectSlotCallback copy_object_func) {
  Address slot_address = start;
@@ -4143,10 +4195,10 @@ bool Heap::IteratePointersInDirtyRegion(Address start,

  while (slot_address < end) {
  Object** slot = reinterpret_cast<Object**>(slot_address);
- if (Heap::InNewSpace(*slot)) {
+ if (heap->InNewSpace(*slot)) {
  ASSERT((*slot)->IsHeapObject());
  copy_object_func(reinterpret_cast<HeapObject**>(slot));
- if (Heap::InNewSpace(*slot)) {
+ if (heap->InNewSpace(*slot)) {
  ASSERT((*slot)->IsHeapObject());
  pointers_to_new_space_found = true;
  }
@@ -4180,14 +4232,16 @@ static bool IteratePointersInDirtyMaps(Address start,
  Address map_address = start;
  bool pointers_to_new_space_found = false;

+ Heap* heap = HEAP;
  while (map_address < end) {
- ASSERT(!Heap::InNewSpace(Memory::Object_at(map_address)));
+ ASSERT(!heap->InNewSpace(Memory::Object_at(map_address)));
  ASSERT(Memory::Object_at(map_address)->IsMap());

  Address pointer_fields_start = map_address + Map::kPointerFieldsBeginOffset;
  Address pointer_fields_end = map_address + Map::kPointerFieldsEndOffset;

- if (Heap::IteratePointersInDirtyRegion(pointer_fields_start,
+ if (Heap::IteratePointersInDirtyRegion(heap,
+ pointer_fields_start,
  pointer_fields_end,
  copy_object_func)) {
  pointers_to_new_space_found = true;
@@ -4201,6 +4255,7 @@ static bool IteratePointersInDirtyMaps(Address start,


  bool Heap::IteratePointersInDirtyMapsRegion(
+ Heap* heap,
  Address start,
  Address end,
  ObjectSlotCallback copy_object_func) {
@@ -4220,7 +4275,8 @@ bool Heap::IteratePointersInDirtyMapsRegion(
  Min(prev_map + Map::kPointerFieldsEndOffset, end);

  contains_pointers_to_new_space =
- IteratePointersInDirtyRegion(pointer_fields_start,
+ IteratePointersInDirtyRegion(heap,
+ pointer_fields_start,
  pointer_fields_end,
  copy_object_func)
  || contains_pointers_to_new_space;
@@ -4242,7 +4298,8 @@ bool Heap::IteratePointersInDirtyMapsRegion(
  Min(end, map_aligned_end + Map::kPointerFieldsEndOffset);

  contains_pointers_to_new_space =
- IteratePointersInDirtyRegion(pointer_fields_start,
+ IteratePointersInDirtyRegion(heap,
+ pointer_fields_start,
  pointer_fields_end,
  copy_object_func)
  || contains_pointers_to_new_space;
@@ -4262,10 +4319,10 @@ void Heap::IterateAndMarkPointersToFromSpace(Address start,

  while (slot_address < end) {
  Object** slot = reinterpret_cast<Object**>(slot_address);
- if (Heap::InFromSpace(*slot)) {
+ if (InFromSpace(*slot)) {
  ASSERT((*slot)->IsHeapObject());
  callback(reinterpret_cast<HeapObject**>(slot));
- if (Heap::InNewSpace(*slot)) {
+ if (InNewSpace(*slot)) {
  ASSERT((*slot)->IsHeapObject());
  marks |= page->GetRegionMaskForAddress(slot_address);
  }
@@ -4304,7 +4361,7 @@ uint32_t Heap::IterateDirtyRegions(
  Address region_end = Min(second_region, area_end);

  if (marks & mask) {
- if (visit_dirty_region(region_start, region_end, copy_object_func)) {
+ if (visit_dirty_region(this, region_start, region_end, copy_object_func)) {
  newmarks |= mask;
  }
  }
@@ -4316,7 +4373,10 @@ uint32_t Heap::IterateDirtyRegions(

  while (region_end <= area_end) {
  if (marks & mask) {
- if (visit_dirty_region(region_start, region_end, copy_object_func)) {
+ if (visit_dirty_region(this,
+ region_start,
+ region_end,
+ copy_object_func)) {
  newmarks |= mask;
  }
  }
@@ -4332,7 +4392,7 @@ uint32_t Heap::IterateDirtyRegions(
  // with region end. Check whether region covering last part of area is
  // dirty.
  if (marks & mask) {
- if (visit_dirty_region(region_start, area_end, copy_object_func)) {
+ if (visit_dirty_region(this, region_start, area_end, copy_object_func)) {
  newmarks |= mask;
  }
  }
@@ -4398,7 +4458,7 @@ void Heap::IterateWeakRoots(ObjectVisitor* v, VisitMode mode) {
  v->Synchronize("symbol_table");
  if (mode != VISIT_ALL_IN_SCAVENGE) {
  // Scavenge collections have special processing for this.
- ExternalStringTable::Iterate(v);
+ external_string_table_.Iterate(v);
  }
  v->Synchronize("external_string_table");
  }
@@ -4411,42 +4471,42 @@ void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) {
  v->VisitPointer(BitCast<Object**>(&hidden_symbol_));
  v->Synchronize("symbol");

- Bootstrapper::Iterate(v);
+ isolate_->bootstrapper()->Iterate(v);
  v->Synchronize("bootstrapper");
- Top::Iterate(v);
+ isolate_->Iterate(v);
  v->Synchronize("top");
  Relocatable::Iterate(v);
  v->Synchronize("relocatable");

  #ifdef ENABLE_DEBUGGER_SUPPORT
- Debug::Iterate(v);
+ isolate_->debug()->Iterate(v);
  #endif
  v->Synchronize("debug");
- CompilationCache::Iterate(v);
+ isolate_->compilation_cache()->Iterate(v);
  v->Synchronize("compilationcache");

  // Iterate over local handles in handle scopes.
- HandleScopeImplementer::Iterate(v);
+ isolate_->handle_scope_implementer()->Iterate(v);
  v->Synchronize("handlescope");

  // Iterate over the builtin code objects and code stubs in the
  // heap. Note that it is not necessary to iterate over code objects
  // on scavenge collections.
  if (mode != VISIT_ALL_IN_SCAVENGE) {
- Builtins::IterateBuiltins(v);
+ isolate_->builtins()->IterateBuiltins(v);
  }
  v->Synchronize("builtins");

  // Iterate over global handles.
  if (mode == VISIT_ONLY_STRONG) {
- GlobalHandles::IterateStrongRoots(v);
+ isolate_->global_handles()->IterateStrongRoots(v);
  } else {
- GlobalHandles::IterateAllRoots(v);
+ isolate_->global_handles()->IterateAllRoots(v);
  }
  v->Synchronize("globalhandles");

  // Iterate over pointers being held by inactive threads.
- ThreadManager::Iterate(v);
+ isolate_->thread_manager()->Iterate(v);
  v->Synchronize("threadmanager");

  // Iterate over the pointers the Serialization/Deserialization code is
@@ -4465,10 +4525,6 @@ void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) {
  }


- // Flag is set when the heap has been configured. The heap can be repeatedly
- // configured through the API until it is setup.
- static bool heap_configured = false;
-
  // TODO(1236194): Since the heap size is configurable on the command line
  // and through the API, we should gracefully handle the case that the heap
  // size is not big enough to fit all the initial objects.
@@ -4515,7 +4571,7 @@ bool Heap::ConfigureHeap(int max_semispace_size,
  // The old generation is paged.
  max_old_generation_size_ = RoundUp(max_old_generation_size_, Page::kPageSize);

- heap_configured = true;
+ configured_ = true;
  return true;
  }

@@ -4543,11 +4599,13 @@ void Heap::RecordStats(HeapStats* stats, bool take_snapshot) {
  *stats->cell_space_size = cell_space_->Size();
  *stats->cell_space_capacity = cell_space_->Capacity();
  *stats->lo_space_size = lo_space_->Size();
- GlobalHandles::RecordStats(stats);
- *stats->memory_allocator_size = MemoryAllocator::Size();
+ isolate_->global_handles()->RecordStats(stats);
+ *stats->memory_allocator_size = isolate()->memory_allocator()->Size();
  *stats->memory_allocator_capacity =
- MemoryAllocator::Size() + MemoryAllocator::Available();
+ isolate()->memory_allocator()->Size() +
+ isolate()->memory_allocator()->Available();
  *stats->os_error = OS::GetLastError();
+ isolate()->memory_allocator()->Available();
  if (take_snapshot) {
  HeapIterator iterator(HeapIterator::kFilterFreeListNodes);
  for (HeapObject* obj = iterator.next();
@@ -4579,8 +4637,177 @@ int Heap::PromotedExternalMemorySize() {
  - amount_of_external_allocated_memory_at_last_global_gc_;
  }

+ #ifdef DEBUG
+
+ // Tags 0, 1, and 3 are used. Use 2 for marking visited HeapObject.
+ static const int kMarkTag = 2;
+
+
+ class HeapDebugUtils {
+ public:
+ explicit HeapDebugUtils(Heap* heap)
+ : search_for_any_global_(false),
+ search_target_(NULL),
+ found_target_(false),
+ object_stack_(20),
+ heap_(heap) {
+ }
+
+ class MarkObjectVisitor : public ObjectVisitor {
+ public:
+ explicit MarkObjectVisitor(HeapDebugUtils* utils) : utils_(utils) { }
+
+ void VisitPointers(Object** start, Object** end) {
+ // Copy all HeapObject pointers in [start, end)
+ for (Object** p = start; p < end; p++) {
+ if ((*p)->IsHeapObject())
+ utils_->MarkObjectRecursively(p);
+ }
+ }
+
+ HeapDebugUtils* utils_;
+ };
+
+ void MarkObjectRecursively(Object** p) {
+ if (!(*p)->IsHeapObject()) return;
+
+ HeapObject* obj = HeapObject::cast(*p);
+
+ Object* map = obj->map();
+
+ if (!map->IsHeapObject()) return; // visited before
+
+ if (found_target_) return; // stop if target found
+ object_stack_.Add(obj);
+ if ((search_for_any_global_ && obj->IsJSGlobalObject()) ||
+ (!search_for_any_global_ && (obj == search_target_))) {
+ found_target_ = true;
+ return;
+ }
+
+ // not visited yet
+ Map* map_p = reinterpret_cast<Map*>(HeapObject::cast(map));
+
+ Address map_addr = map_p->address();
+
+ obj->set_map(reinterpret_cast<Map*>(map_addr + kMarkTag));
+
+ MarkObjectRecursively(&map);
+
+ MarkObjectVisitor mark_visitor(this);
+
+ obj->IterateBody(map_p->instance_type(), obj->SizeFromMap(map_p),
+ &mark_visitor);
+
+ if (!found_target_) // don't pop if found the target
+ object_stack_.RemoveLast();
+ }
+
+
+ class UnmarkObjectVisitor : public ObjectVisitor {
+ public:
+ explicit UnmarkObjectVisitor(HeapDebugUtils* utils) : utils_(utils) { }
+
+ void VisitPointers(Object** start, Object** end) {
+ // Copy all HeapObject pointers in [start, end)
+ for (Object** p = start; p < end; p++) {
+ if ((*p)->IsHeapObject())
+ utils_->UnmarkObjectRecursively(p);
+ }
+ }
+
+ HeapDebugUtils* utils_;
+ };
+
+
+ void UnmarkObjectRecursively(Object** p) {
+ if (!(*p)->IsHeapObject()) return;
+
+ HeapObject* obj = HeapObject::cast(*p);
+
+ Object* map = obj->map();
+
+ if (map->IsHeapObject()) return; // unmarked already
+
+ Address map_addr = reinterpret_cast<Address>(map);
+
+ map_addr -= kMarkTag;
+
+ ASSERT_TAG_ALIGNED(map_addr);
+
+ HeapObject* map_p = HeapObject::FromAddress(map_addr);
+
+ obj->set_map(reinterpret_cast<Map*>(map_p));
+
+ UnmarkObjectRecursively(reinterpret_cast<Object**>(&map_p));
+
+ UnmarkObjectVisitor unmark_visitor(this);
+
+ obj->IterateBody(Map::cast(map_p)->instance_type(),
+ obj->SizeFromMap(Map::cast(map_p)),
+ &unmark_visitor);
+ }
+
+
+ void MarkRootObjectRecursively(Object** root) {
+ if (search_for_any_global_) {
+ ASSERT(search_target_ == NULL);
+ } else {
+ ASSERT(search_target_->IsHeapObject());
+ }
+ found_target_ = false;
+ object_stack_.Clear();
+
+ MarkObjectRecursively(root);
+ UnmarkObjectRecursively(root);
+
+ if (found_target_) {
+ PrintF("=====================================\n");
+ PrintF("==== Path to object ====\n");
+ PrintF("=====================================\n\n");
+
+ ASSERT(!object_stack_.is_empty());
+ for (int i = 0; i < object_stack_.length(); i++) {
+ if (i > 0) PrintF("\n |\n |\n V\n\n");
+ Object* obj = object_stack_[i];
+ obj->Print();
+ }
+ PrintF("=====================================\n");
+ }
+ }
+
+ // Helper class for visiting HeapObjects recursively.
+ class MarkRootVisitor: public ObjectVisitor {
+ public:
+ explicit MarkRootVisitor(HeapDebugUtils* utils) : utils_(utils) { }
+
+ void VisitPointers(Object** start, Object** end) {
+ // Visit all HeapObject pointers in [start, end)
+ for (Object** p = start; p < end; p++) {
+ if ((*p)->IsHeapObject())
+ utils_->MarkRootObjectRecursively(p);
+ }
+ }
+
+ HeapDebugUtils* utils_;
+ };
+
+ bool search_for_any_global_;
+ Object* search_target_;
+ bool found_target_;
+ List<Object*> object_stack_;
+ Heap* heap_;
+
+ friend class Heap;
+ };
+
+ #endif

  bool Heap::Setup(bool create_heap_objects) {
+ #ifdef DEBUG
+ debug_utils_ = new HeapDebugUtils(this);
+ #endif
+
  // Initialize heap spaces and initial maps and objects. Whenever something
  // goes wrong, just return false. The caller should check the results and
  // call Heap::TearDown() to release allocated memory.
@@ -4589,13 +4816,19 @@ bool Heap::Setup(bool create_heap_objects) {
  // Configuration is based on the flags new-space-size (really the semispace
  // size) and old-space-size if set or the initial values of semispace_size_
  // and old_generation_size_ otherwise.
- if (!heap_configured) {
+ if (!configured_) {
  if (!ConfigureHeapDefault()) return false;
  }

- ScavengingVisitor::Initialize();
- NewSpaceScavenger::Initialize();
- MarkCompactCollector::Initialize();
+ gc_initializer_mutex->Lock();
+ static bool initialized_gc = false;
+ if (!initialized_gc) {
+ initialized_gc = true;
+ InitializeScavengingVisitorsTables();
+ NewSpaceScavenger::Initialize();
+ MarkCompactCollector::Initialize();
+ }
+ gc_initializer_mutex->Unlock();

  MarkMapPointersAsEncoded(false);

@@ -4603,9 +4836,11 @@ bool Heap::Setup(bool create_heap_objects) {
  // space. The chunk is double the size of the requested reserved
  // new space size to ensure that we can find a pair of semispaces that
  // are contiguous and aligned to their size.
- if (!MemoryAllocator::Setup(MaxReserved(), MaxExecutableSize())) return false;
+ if (!isolate_->memory_allocator()->Setup(MaxReserved(), MaxExecutableSize()))
+ return false;
  void* chunk =
- MemoryAllocator::ReserveInitialChunk(4 * reserved_semispace_size_);
+ isolate_->memory_allocator()->ReserveInitialChunk(
+ 4 * reserved_semispace_size_);
  if (chunk == NULL) return false;

  // Align the pair of semispaces to their size, which must be a power
@@ -4618,13 +4853,19 @@ bool Heap::Setup(bool create_heap_objects) {

  // Initialize old pointer space.
  old_pointer_space_ =
- new OldSpace(max_old_generation_size_, OLD_POINTER_SPACE, NOT_EXECUTABLE);
+ new OldSpace(this,
+ max_old_generation_size_,
+ OLD_POINTER_SPACE,
+ NOT_EXECUTABLE);
  if (old_pointer_space_ == NULL) return false;
  if (!old_pointer_space_->Setup(NULL, 0)) return false;

  // Initialize old data space.
  old_data_space_ =
- new OldSpace(max_old_generation_size_, OLD_DATA_SPACE, NOT_EXECUTABLE);
+ new OldSpace(this,
+ max_old_generation_size_,
+ OLD_DATA_SPACE,
+ NOT_EXECUTABLE);
  if (old_data_space_ == NULL) return false;
  if (!old_data_space_->Setup(NULL, 0)) return false;

@@ -4633,18 +4874,18 @@ bool Heap::Setup(bool create_heap_objects) {
  // On 64-bit platform(s), we put all code objects in a 2 GB range of
  // virtual address space, so that they can call each other with near calls.
  if (code_range_size_ > 0) {
- if (!CodeRange::Setup(code_range_size_)) {
+ if (!isolate_->code_range()->Setup(code_range_size_)) {
  return false;
  }
  }

  code_space_ =
- new OldSpace(max_old_generation_size_, CODE_SPACE, EXECUTABLE);
+ new OldSpace(this, max_old_generation_size_, CODE_SPACE, EXECUTABLE);
  if (code_space_ == NULL) return false;
  if (!code_space_->Setup(NULL, 0)) return false;

  // Initialize map space.
- map_space_ = new MapSpace(FLAG_use_big_map_space
+ map_space_ = new MapSpace(this, FLAG_use_big_map_space
  ? max_old_generation_size_
  : MapSpace::kMaxMapPageIndex * Page::kPageSize,
  FLAG_max_map_space_pages,
@@ -4653,14 +4894,14 @@ bool Heap::Setup(bool create_heap_objects) {
  if (!map_space_->Setup(NULL, 0)) return false;

  // Initialize global property cell space.
- cell_space_ = new CellSpace(max_old_generation_size_, CELL_SPACE);
+ cell_space_ = new CellSpace(this, max_old_generation_size_, CELL_SPACE);
  if (cell_space_ == NULL) return false;
  if (!cell_space_->Setup(NULL, 0)) return false;

  // The large object code space may contain code or data. We set the memory
  // to be non-executable here for safety, but this means we need to enable it
  // explicitly when allocating large code objects.
- lo_space_ = new LargeObjectSpace(LO_SPACE);
+ lo_space_ = new LargeObjectSpace(this, LO_SPACE);
  if (lo_space_ == NULL) return false;
  if (!lo_space_->Setup()) return false;

@@ -4675,12 +4916,12 @@ bool Heap::Setup(bool create_heap_objects) {
  global_contexts_list_ = undefined_value();
  }

- LOG(IntPtrTEvent("heap-capacity", Capacity()));
- LOG(IntPtrTEvent("heap-available", Available()));
+ LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity()));
+ LOG(isolate_, IntPtrTEvent("heap-available", Available()));

  #ifdef ENABLE_LOGGING_AND_PROFILING
  // This should be called only after initial objects have been created.
- ProducerHeapProfile::Setup();
+ isolate_->producer_heap_profile()->Setup();
  #endif

  return true;
@@ -4688,6 +4929,8 @@ bool Heap::Setup(bool create_heap_objects) {


  void Heap::SetStackLimits() {
+ ASSERT(isolate_ != NULL);
+ ASSERT(isolate_ == isolate());
  // On 64 bit machines, pointers are generally out of range of Smis. We write
  // something that looks like an out of range Smi to the GC.

@@ -4695,10 +4938,10 @@ void Heap::SetStackLimits() {
  // These are actually addresses, but the tag makes the GC ignore it.
  roots_[kStackLimitRootIndex] =
  reinterpret_cast<Object*>(
- (StackGuard::jslimit() & ~kSmiTagMask) | kSmiTag);
+ (isolate_->stack_guard()->jslimit() & ~kSmiTagMask) | kSmiTag);
  roots_[kRealStackLimitRootIndex] =
  reinterpret_cast<Object*>(
- (StackGuard::real_jslimit() & ~kSmiTagMask) | kSmiTag);
+ (isolate_->stack_guard()->real_jslimit() & ~kSmiTagMask) | kSmiTag);
  }


@@ -4708,16 +4951,16 @@ void Heap::TearDown() {
  PrintF("gc_count=%d ", gc_count_);
  PrintF("mark_sweep_count=%d ", ms_count_);
  PrintF("mark_compact_count=%d ", mc_count_);
- PrintF("max_gc_pause=%d ", GCTracer::get_max_gc_pause());
- PrintF("min_in_mutator=%d ", GCTracer::get_min_in_mutator());
+ PrintF("max_gc_pause=%d ", get_max_gc_pause());
+ PrintF("min_in_mutator=%d ", get_min_in_mutator());
  PrintF("max_alive_after_gc=%" V8_PTR_PREFIX "d ",
- GCTracer::get_max_alive_after_gc());
+ get_max_alive_after_gc());
  PrintF("\n\n");
  }

- GlobalHandles::TearDown();
+ isolate_->global_handles()->TearDown();

- ExternalStringTable::TearDown();
+ external_string_table_.TearDown();

  new_space_.TearDown();

@@ -4757,7 +5000,12 @@ void Heap::TearDown() {
  lo_space_ = NULL;
  }

- MemoryAllocator::TearDown();
+ isolate_->memory_allocator()->TearDown();
+
+ #ifdef DEBUG
+ delete debug_utils_;
+ debug_utils_ = NULL;
+ #endif
  }


@@ -4846,7 +5094,7 @@ class PrintHandleVisitor: public ObjectVisitor {
  void Heap::PrintHandles() {
  PrintF("Handles:\n");
  PrintHandleVisitor v;
- HandleScopeImplementer::Iterate(&v);
+ isolate_->handle_scope_implementer()->Iterate(&v);
  }

  #endif
@@ -4855,19 +5103,19 @@ void Heap::PrintHandles() {
  Space* AllSpaces::next() {
  switch (counter_++) {
  case NEW_SPACE:
- return Heap::new_space();
+ return HEAP->new_space();
  case OLD_POINTER_SPACE:
- return Heap::old_pointer_space();
+ return HEAP->old_pointer_space();
  case OLD_DATA_SPACE:
- return Heap::old_data_space();
+ return HEAP->old_data_space();
  case CODE_SPACE:
- return Heap::code_space();
+ return HEAP->code_space();
  case MAP_SPACE:
- return Heap::map_space();
+ return HEAP->map_space();
  case CELL_SPACE:
- return Heap::cell_space();
+ return HEAP->cell_space();
  case LO_SPACE:
- return Heap::lo_space();
+ return HEAP->lo_space();
  default:
  return NULL;
  }
@@ -4877,15 +5125,15 @@ Space* AllSpaces::next() {
  PagedSpace* PagedSpaces::next() {
  switch (counter_++) {
  case OLD_POINTER_SPACE:
- return Heap::old_pointer_space();
+ return HEAP->old_pointer_space();
  case OLD_DATA_SPACE:
- return Heap::old_data_space();
+ return HEAP->old_data_space();
  case CODE_SPACE:
- return Heap::code_space();
+ return HEAP->code_space();
  case MAP_SPACE:
- return Heap::map_space();
+ return HEAP->map_space();
  case CELL_SPACE:
- return Heap::cell_space();
+ return HEAP->cell_space();
  default:
  return NULL;
  }
@@ -4896,11 +5144,11 @@ PagedSpace* PagedSpaces::next() {
  OldSpace* OldSpaces::next() {
  switch (counter_++) {
  case OLD_POINTER_SPACE:
- return Heap::old_pointer_space();
+ return HEAP->old_pointer_space();
  case OLD_DATA_SPACE:
- return Heap::old_data_space();
+ return HEAP->old_data_space();
  case CODE_SPACE:
- return Heap::code_space();
+ return HEAP->code_space();
  default:
  return NULL;
  }
@@ -4955,25 +5203,25 @@ ObjectIterator* SpaceIterator::CreateIterator() {

  switch (current_space_) {
  case NEW_SPACE:
- iterator_ = new SemiSpaceIterator(Heap::new_space(), size_func_);
+ iterator_ = new SemiSpaceIterator(HEAP->new_space(), size_func_);
  break;
  case OLD_POINTER_SPACE:
- iterator_ = new HeapObjectIterator(Heap::old_pointer_space(), size_func_);
+ iterator_ = new HeapObjectIterator(HEAP->old_pointer_space(), size_func_);
  break;
  case OLD_DATA_SPACE:
- iterator_ = new HeapObjectIterator(Heap::old_data_space(), size_func_);
+ iterator_ = new HeapObjectIterator(HEAP->old_data_space(), size_func_);
  break;
  case CODE_SPACE:
- iterator_ = new HeapObjectIterator(Heap::code_space(), size_func_);
+ iterator_ = new HeapObjectIterator(HEAP->code_space(), size_func_);
  break;
  case MAP_SPACE:
- iterator_ = new HeapObjectIterator(Heap::map_space(), size_func_);
+ iterator_ = new HeapObjectIterator(HEAP->map_space(), size_func_);
  break;
  case CELL_SPACE:
- iterator_ = new HeapObjectIterator(Heap::cell_space(), size_func_);
+ iterator_ = new HeapObjectIterator(HEAP->cell_space(), size_func_);
  break;
  case LO_SPACE:
- iterator_ = new LargeObjectIterator(Heap::lo_space(), size_func_);
+ iterator_ = new LargeObjectIterator(HEAP->lo_space(), size_func_);
  break;
  }

@@ -5007,16 +5255,17 @@ class FreeListNodesFilter : public HeapObjectsFilter {

  private:
  void MarkFreeListNodes() {
- Heap::old_pointer_space()->MarkFreeListNodes();
- Heap::old_data_space()->MarkFreeListNodes();
- MarkCodeSpaceFreeListNodes();
- Heap::map_space()->MarkFreeListNodes();
- Heap::cell_space()->MarkFreeListNodes();
+ Heap* heap = HEAP;
+ heap->old_pointer_space()->MarkFreeListNodes();
+ heap->old_data_space()->MarkFreeListNodes();
+ MarkCodeSpaceFreeListNodes(heap);
+ heap->map_space()->MarkFreeListNodes();
+ heap->cell_space()->MarkFreeListNodes();
  }

- void MarkCodeSpaceFreeListNodes() {
+ void MarkCodeSpaceFreeListNodes(Heap* heap) {
  // For code space, using FreeListNode::IsFreeListNode is OK.
- HeapObjectIterator iter(Heap::code_space());
+ HeapObjectIterator iter(heap->code_space());
  for (HeapObject* obj = iter.next_object();
  obj != NULL;
  obj = iter.next_object()) {
@@ -5078,7 +5327,7 @@ class UnreachableObjectsFilter : public HeapObjectsFilter {
  obj->SetMark();
  }
  UnmarkingVisitor visitor;
- Heap::IterateRoots(&visitor, VISIT_ALL);
+ HEAP->IterateRoots(&visitor, VISIT_ALL);
  while (visitor.can_process())
  visitor.ProcessNext();
  }
@@ -5336,7 +5585,11 @@ void PathTracer::ProcessResults() {
  for (int i = 0; i < object_stack_.length(); i++) {
  if (i > 0) PrintF("\n |\n |\n V\n\n");
  Object* obj = object_stack_[i];
+ #ifdef OBJECT_PRINT
  obj->Print();
+ #else
+ obj->ShortPrint();
+ #endif
  }
  PrintF("=====================================\n");
  }
@@ -5377,7 +5630,7 @@ static intptr_t CountTotalHolesSize() {
  }


- GCTracer::GCTracer()
+ GCTracer::GCTracer(Heap* heap)
  : start_time_(0.0),
  start_size_(0),
  gc_count_(0),
@@ -5386,14 +5639,16 @@ GCTracer::GCTracer()
  marked_count_(0),
  allocated_since_last_gc_(0),
  spent_in_mutator_(0),
- promoted_objects_size_(0) {
+ promoted_objects_size_(0),
+ heap_(heap) {
  // These two fields reflect the state of the previous full collection.
  // Set them before they are changed by the collector.
- previous_has_compacted_ = MarkCompactCollector::HasCompacted();
- previous_marked_count_ = MarkCompactCollector::previous_marked_count();
+ previous_has_compacted_ = heap_->mark_compact_collector_.HasCompacted();
+ previous_marked_count_ =
+ heap_->mark_compact_collector_.previous_marked_count();
  if (!FLAG_trace_gc && !FLAG_print_cumulative_gc_stat) return;
  start_time_ = OS::TimeCurrentMillis();
- start_size_ = Heap::SizeOfObjects();
+ start_size_ = heap_->SizeOfObjects();

  for (int i = 0; i < Scope::kNumberOfScopes; i++) {
  scopes_[i] = 0;
@@ -5401,10 +5656,11 @@ GCTracer::GCTracer()

  in_free_list_or_wasted_before_gc_ = CountTotalHolesSize();

- allocated_since_last_gc_ = Heap::SizeOfObjects() - alive_after_last_gc_;
+ allocated_since_last_gc_ =
+ heap_->SizeOfObjects() - heap_->alive_after_last_gc_;

- if (last_gc_end_timestamp_ > 0) {
- spent_in_mutator_ = Max(start_time_ - last_gc_end_timestamp_, 0.0);
+ if (heap_->last_gc_end_timestamp_ > 0) {
+ spent_in_mutator_ = Max(start_time_ - heap_->last_gc_end_timestamp_, 0.0);
  }
  }

@@ -5413,20 +5669,21 @@ GCTracer::~GCTracer() {
  // Printf ONE line iff flag is set.
  if (!FLAG_trace_gc && !FLAG_print_cumulative_gc_stat) return;

- bool first_gc = (last_gc_end_timestamp_ == 0);
+ bool first_gc = (heap_->last_gc_end_timestamp_ == 0);

- alive_after_last_gc_ = Heap::SizeOfObjects();
- last_gc_end_timestamp_ = OS::TimeCurrentMillis();
+ heap_->alive_after_last_gc_ = heap_->SizeOfObjects();
+ heap_->last_gc_end_timestamp_ = OS::TimeCurrentMillis();

- int time = static_cast<int>(last_gc_end_timestamp_ - start_time_);
+ int time = static_cast<int>(heap_->last_gc_end_timestamp_ - start_time_);

  // Update cumulative GC statistics if required.
  if (FLAG_print_cumulative_gc_stat) {
- max_gc_pause_ = Max(max_gc_pause_, time);
- max_alive_after_gc_ = Max(max_alive_after_gc_, alive_after_last_gc_);
+ heap_->max_gc_pause_ = Max(heap_->max_gc_pause_, time);
+ heap_->max_alive_after_gc_ = Max(heap_->max_alive_after_gc_,
+ heap_->alive_after_last_gc_);
  if (!first_gc) {
- min_in_mutator_ = Min(min_in_mutator_,
- static_cast<int>(spent_in_mutator_));
+ heap_->min_in_mutator_ = Min(heap_->min_in_mutator_,
+ static_cast<int>(spent_in_mutator_));
  }
  }

@@ -5451,7 +5708,8 @@ GCTracer::~GCTracer() {
  PrintF("s");
  break;
  case MARK_COMPACTOR:
- PrintF(MarkCompactCollector::HasCompacted() ? "mc" : "ms");
+ PrintF("%s",
+ heap_->mark_compact_collector_.HasCompacted() ? "mc" : "ms");
  break;
  default:
  UNREACHABLE();
@@ -5465,7 +5723,7 @@ GCTracer::~GCTracer() {
  PrintF("compact=%d ", static_cast<int>(scopes_[Scope::MC_COMPACT]));

  PrintF("total_size_before=%" V8_PTR_PREFIX "d ", start_size_);
- PrintF("total_size_after=%" V8_PTR_PREFIX "d ", Heap::SizeOfObjects());
+ PrintF("total_size_after=%" V8_PTR_PREFIX "d ", heap_->SizeOfObjects());
  PrintF("holes_size_before=%" V8_PTR_PREFIX "d ",
  in_free_list_or_wasted_before_gc_);
  PrintF("holes_size_after=%" V8_PTR_PREFIX "d ", CountTotalHolesSize());
@@ -5477,7 +5735,7 @@ GCTracer::~GCTracer() {
  }

  #if defined(ENABLE_LOGGING_AND_PROFILING)
- Heap::PrintShortHeapStatistics();
+ heap_->PrintShortHeapStatistics();
  #endif
  }

@@ -5487,8 +5745,8 @@ const char* GCTracer::CollectorString() {
  case SCAVENGER:
  return "Scavenge";
  case MARK_COMPACTOR:
- return MarkCompactCollector::HasCompacted() ? "Mark-compact"
- : "Mark-sweep";
+ return heap_->mark_compact_collector_.HasCompacted() ? "Mark-compact"
+ : "Mark-sweep";
  }
  return "Unknown GC";
  }
@@ -5508,13 +5766,13 @@ int KeyedLookupCache::Lookup(Map* map, String* name) {
  if ((key.map == map) && key.name->Equals(name)) {
  return field_offsets_[index];
  }
- return -1;
+ return kNotFound;
  }


  void KeyedLookupCache::Update(Map* map, String* name, int field_offset) {
  String* symbol;
- if (Heap::LookupSymbolIfExists(name, &symbol)) {
+ if (HEAP->LookupSymbolIfExists(name, &symbol)) {
  int index = Hash(map, symbol);
  Key& key = keys_[index];
  key.map = map;
@@ -5529,35 +5787,24 @@ void KeyedLookupCache::Clear() {
  }


- KeyedLookupCache::Key KeyedLookupCache::keys_[KeyedLookupCache::kLength];
-
-
- int KeyedLookupCache::field_offsets_[KeyedLookupCache::kLength];
-
-
  void DescriptorLookupCache::Clear() {
  for (int index = 0; index < kLength; index++) keys_[index].array = NULL;
  }


- DescriptorLookupCache::Key
- DescriptorLookupCache::keys_[DescriptorLookupCache::kLength];
-
- int DescriptorLookupCache::results_[DescriptorLookupCache::kLength];
-
-
  #ifdef DEBUG
  void Heap::GarbageCollectionGreedyCheck() {
  ASSERT(FLAG_gc_greedy);
- if (Bootstrapper::IsActive()) return;
+ if (isolate_->bootstrapper()->IsActive()) return;
  if (disallow_allocation_failure()) return;
  CollectGarbage(NEW_SPACE);
  }
  #endif


- TranscendentalCache::TranscendentalCache(TranscendentalCache::Type t)
- : type_(t) {
+ TranscendentalCache::SubCache::SubCache(Type t)
+ : type_(t),
+ isolate_(Isolate::Current()) {
  uint32_t in0 = 0xffffffffu; // Bit-pattern for a NaN that isn't
  uint32_t in1 = 0xffffffffu; // generated by the FPU.
  for (int i = 0; i < kCacheSize; i++) {
@@ -5568,9 +5815,6 @@ TranscendentalCache::TranscendentalCache(TranscendentalCache::Type t)
  }


- TranscendentalCache* TranscendentalCache::caches_[kNumberOfCaches];
-
-
  void TranscendentalCache::Clear() {
  for (int i = 0; i < kNumberOfCaches; i++) {
  if (caches_[i] != NULL) {
@@ -5584,8 +5828,8 @@ void TranscendentalCache::Clear() {
  void ExternalStringTable::CleanUp() {
  int last = 0;
  for (int i = 0; i < new_space_strings_.length(); ++i) {
- if (new_space_strings_[i] == Heap::raw_unchecked_null_value()) continue;
- if (Heap::InNewSpace(new_space_strings_[i])) {
+ if (new_space_strings_[i] == heap_->raw_unchecked_null_value()) continue;
+ if (heap_->InNewSpace(new_space_strings_[i])) {
  new_space_strings_[last++] = new_space_strings_[i];
  } else {
  old_space_strings_.Add(new_space_strings_[i]);
@@ -5594,8 +5838,8 @@ void ExternalStringTable::CleanUp() {
  new_space_strings_.Rewind(last);
  last = 0;
  for (int i = 0; i < old_space_strings_.length(); ++i) {
- if (old_space_strings_[i] == Heap::raw_unchecked_null_value()) continue;
- ASSERT(!Heap::InNewSpace(old_space_strings_[i]));
+ if (old_space_strings_[i] == heap_->raw_unchecked_null_value()) continue;
+ ASSERT(!heap_->InNewSpace(old_space_strings_[i]));
  old_space_strings_[last++] = old_space_strings_[i];
  }
  old_space_strings_.Rewind(last);
@@ -5609,7 +5853,4 @@ void ExternalStringTable::TearDown() {
  }


- List<Object*> ExternalStringTable::new_space_strings_;
- List<Object*> ExternalStringTable::old_space_strings_;
-
  } } // namespace v8::internal